From 609e9d0f9a38992a6ee6bc2562e7e5143bcc1e6d Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Fri, 26 Sep 2025 23:01:45 +0000 Subject: [PATCH 1/3] update sdk --- .codegen.json | 3 +- .codegen/_openapi_sha | 2 +- databricks/sdk/__init__.py | 159 +- databricks/sdk/service/agentbricks.py | 6 +- databricks/sdk/service/apps.py | 537 +--- databricks/sdk/service/catalog.py | 1257 ++------ databricks/sdk/service/cleanrooms.py | 46 +- databricks/sdk/service/compute.py | 501 +++- databricks/sdk/service/dashboards.py | 630 +++- databricks/sdk/service/database.py | 246 +- databricks/sdk/service/iam.py | 3045 ++++---------------- databricks/sdk/service/iamv2.py | 1332 +++++++-- databricks/sdk/service/jobs.py | 193 +- databricks/sdk/service/ml.py | 491 +--- databricks/sdk/service/oauth2.py | 22 +- databricks/sdk/service/pipelines.py | 194 +- databricks/sdk/service/qualitymonitorv2.py | 14 + databricks/sdk/service/serving.py | 86 - databricks/sdk/service/settings.py | 108 +- databricks/sdk/service/settingsv2.py | 468 +-- databricks/sdk/service/sharing.py | 106 +- databricks/sdk/service/sql.py | 83 +- databricks/sdk/service/tags.py | 248 +- databricks/sdk/service/vectorsearch.py | 34 +- databricks/sdk/service/workspace.py | 17 +- 25 files changed, 3787 insertions(+), 6041 deletions(-) diff --git a/.codegen.json b/.codegen.json index 65077c1cc..0cf0321a5 100644 --- a/.codegen.json +++ b/.codegen.json @@ -16,8 +16,7 @@ "post_generate": [ "make fmt", "pytest -m 'not integration' --cov=databricks --cov-report html tests", - "pip install .", - "python3.12 docs/gen-client-docs.py" + "pip install ." ] } } diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 27c63e442..2db0598a6 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -608df7153d64c19e2d255144c9935fd4ed45900a \ No newline at end of file +universe:/home/parth.bansal/universe \ No newline at end of file diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index ef42d1c34..101013cb5 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -40,7 +40,7 @@ from databricks.sdk.service import vectorsearch as pkg_vectorsearch from databricks.sdk.service import workspace as pkg_workspace from databricks.sdk.service.agentbricks import AgentBricksAPI -from databricks.sdk.service.apps import AppsAPI, AppsSettingsAPI +from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, BudgetsAPI, LogDeliveryAPI, UsageDashboardsAPI) @@ -55,13 +55,13 @@ ExternalMetadataAPI, FunctionsAPI, GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, - PoliciesAPI, QualityMonitorsAPI, + QualityMonitorsAPI, RegisteredModelsAPI, - ResourceQuotasAPI, RfaAPI, - SchemasAPI, StorageCredentialsAPI, + RequestForAccessAPI, + ResourceQuotasAPI, SchemasAPI, + StorageCredentialsAPI, SystemSchemasAPI, TableConstraintsAPI, TablesAPI, - TemporaryPathCredentialsAPI, TemporaryTableCredentialsAPI, VolumesAPI, WorkspaceBindingsAPI) from databricks.sdk.service.cleanrooms import (CleanRoomAssetRevisionsAPI, @@ -77,21 +77,19 @@ PolicyComplianceForClustersAPI, PolicyFamiliesAPI) from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, - LakeviewEmbeddedAPI) + LakeviewEmbeddedAPI, + QueryExecutionAPI) from databricks.sdk.service.database import DatabaseAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccessControlAPI, AccountAccessControlAPI, AccountAccessControlProxyAPI, - AccountGroupsAPI, 
AccountGroupsV2API, + AccountGroupsAPI, AccountServicePrincipalsAPI, - AccountServicePrincipalsV2API, - AccountUsersAPI, AccountUsersV2API, - CurrentUserAPI, GroupsAPI, GroupsV2API, - PermissionMigrationAPI, PermissionsAPI, - ServicePrincipalsAPI, - ServicePrincipalsV2API, UsersAPI, - UsersV2API, WorkspaceAssignmentAPI) + AccountUsersAPI, CurrentUserAPI, + GroupsAPI, PermissionMigrationAPI, + PermissionsAPI, ServicePrincipalsAPI, + UsersAPI, WorkspaceAssignmentAPI) from databricks.sdk.service.iamv2 import AccountIamV2API, WorkspaceIamV2API from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI from databricks.sdk.service.marketplace import ( @@ -100,9 +98,8 @@ ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI, ProviderListingsAPI, ProviderPersonalizationRequestsAPI, ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI) -from databricks.sdk.service.ml import (ExperimentsAPI, FeatureEngineeringAPI, - FeatureStoreAPI, ForecastingAPI, - MaterializedFeaturesAPI, +from databricks.sdk.service.ml import (ExperimentsAPI, FeatureStoreAPI, + ForecastingAPI, MaterializedFeaturesAPI, ModelRegistryAPI) from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI, CustomAppIntegrationAPI, @@ -152,7 +149,7 @@ QueryVisualizationsLegacyAPI, RedashConfigAPI, StatementExecutionAPI, WarehousesAPI) -from databricks.sdk.service.tags import TagPoliciesAPI +from databricks.sdk.service.tags import TagAssignmentsAPI, TagPoliciesAPI from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI, @@ -260,7 +257,6 @@ def __init__( self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client) self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client) self._apps = pkg_apps.AppsAPI(self._api_client) - self._apps_settings = pkg_apps.AppsSettingsAPI(self._api_client) self._artifact_allowlists = pkg_catalog.ArtifactAllowlistsAPI(self._api_client) self._catalogs = pkg_catalog.CatalogsAPI(self._api_client) self._clean_room_asset_revisions = pkg_cleanrooms.CleanRoomAssetRevisionsAPI(self._api_client) @@ -291,7 +287,6 @@ def __init__( self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client) self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client) self._external_metadata = pkg_catalog.ExternalMetadataAPI(self._api_client) - self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) self._functions = pkg_catalog.FunctionsAPI(self._api_client) @@ -299,7 +294,7 @@ def __init__( self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client) self._global_init_scripts = pkg_compute.GlobalInitScriptsAPI(self._api_client) self._grants = pkg_catalog.GrantsAPI(self._api_client) - self._groups_v2 = pkg_iam.GroupsV2API(self._api_client) + self._groups = pkg_iam.GroupsAPI(self._api_client) self._instance_pools = pkg_compute.InstancePoolsAPI(self._api_client) self._instance_profiles = pkg_compute.InstanceProfilesAPI(self._api_client) self._ip_access_lists = pkg_settings.IpAccessListsAPI(self._api_client) @@ -316,7 +311,6 @@ def __init__( self._permission_migration = pkg_iam.PermissionMigrationAPI(self._api_client) self._permissions = pkg_iam.PermissionsAPI(self._api_client) self._pipelines = pkg_pipelines.PipelinesAPI(self._api_client) - self._policies = 
pkg_catalog.PoliciesAPI(self._api_client) self._policy_compliance_for_clusters = pkg_compute.PolicyComplianceForClustersAPI(self._api_client) self._policy_compliance_for_jobs = pkg_jobs.PolicyComplianceForJobsAPI(self._api_client) self._policy_families = pkg_compute.PolicyFamiliesAPI(self._api_client) @@ -334,6 +328,7 @@ def __init__( self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client) self._queries = pkg_sql.QueriesAPI(self._api_client) self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client) + self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client) self._query_history = pkg_sql.QueryHistoryAPI(self._api_client) self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client) self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client) @@ -343,12 +338,12 @@ def __init__( self._redash_config = pkg_sql.RedashConfigAPI(self._api_client) self._registered_models = pkg_catalog.RegisteredModelsAPI(self._api_client) self._repos = pkg_workspace.ReposAPI(self._api_client) + self._request_for_access = pkg_catalog.RequestForAccessAPI(self._api_client) self._resource_quotas = pkg_catalog.ResourceQuotasAPI(self._api_client) - self._rfa = pkg_catalog.RfaAPI(self._api_client) self._schemas = pkg_catalog.SchemasAPI(self._api_client) self._secrets = pkg_workspace.SecretsAPI(self._api_client) self._service_principal_secrets_proxy = pkg_oauth2.ServicePrincipalSecretsProxyAPI(self._api_client) - self._service_principals_v2 = pkg_iam.ServicePrincipalsV2API(self._api_client) + self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._serving_endpoints = serving_endpoints serving_endpoints_data_plane_token_source = DataPlaneTokenSource( self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh @@ -363,12 +358,12 @@ def __init__( self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client) self._table_constraints = pkg_catalog.TableConstraintsAPI(self._api_client) self._tables = pkg_catalog.TablesAPI(self._api_client) + self._tag_assignments = pkg_tags.TagAssignmentsAPI(self._api_client) self._tag_policies = pkg_tags.TagPoliciesAPI(self._api_client) - self._temporary_path_credentials = pkg_catalog.TemporaryPathCredentialsAPI(self._api_client) self._temporary_table_credentials = pkg_catalog.TemporaryTableCredentialsAPI(self._api_client) self._token_management = pkg_settings.TokenManagementAPI(self._api_client) self._tokens = pkg_settings.TokensAPI(self._api_client) - self._users_v2 = pkg_iam.UsersV2API(self._api_client) + self._users = pkg_iam.UsersAPI(self._api_client) self._vector_search_endpoints = pkg_vectorsearch.VectorSearchEndpointsAPI(self._api_client) self._vector_search_indexes = pkg_vectorsearch.VectorSearchIndexesAPI(self._api_client) self._volumes = pkg_catalog.VolumesAPI(self._api_client) @@ -379,9 +374,6 @@ def __init__( self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client) - self._groups = pkg_iam.GroupsAPI(self._api_client) - self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) - self._users = pkg_iam.UsersAPI(self._api_client) @property def config(self) -> client.Config: @@ -430,11 +422,6 @@ def apps(self) -> pkg_apps.AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to 
interact through single sign-on.""" return self._apps - @property - def apps_settings(self) -> pkg_apps.AppsSettingsAPI: - """Apps Settings manage the settings for the Apps service on a customer's Databricks instance.""" - return self._apps_settings - @property def artifact_allowlists(self) -> pkg_catalog.ArtifactAllowlistsAPI: """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode.""" @@ -562,7 +549,7 @@ def dbsql_permissions(self) -> pkg_sql.DbsqlPermissionsAPI: @property def entity_tag_assignments(self) -> pkg_catalog.EntityTagAssignmentsAPI: - """Tags are attributes that include keys and optional values that you can use to organize and categorize entities in Unity Catalog.""" + """Entity Tag Assignments provide a unified interface for managing tag assignments on Unity Catalog entities.""" return self._entity_tag_assignments @property @@ -585,11 +572,6 @@ def external_metadata(self) -> pkg_catalog.ExternalMetadataAPI: """External Metadata objects enable customers to register and manage metadata about external systems within Unity Catalog.""" return self._external_metadata - @property - def feature_engineering(self) -> pkg_ml.FeatureEngineeringAPI: - """[description].""" - return self._feature_engineering - @property def feature_store(self) -> pkg_ml.FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features.""" @@ -626,9 +608,9 @@ def grants(self) -> pkg_catalog.GrantsAPI: return self._grants @property - def groups_v2(self) -> pkg_iam.GroupsV2API: + def groups(self) -> pkg_iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" - return self._groups_v2 + return self._groups @property def instance_pools(self) -> pkg_compute.InstancePoolsAPI: @@ -710,11 +692,6 @@ def pipelines(self) -> pkg_pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines - @property - def policies(self) -> pkg_catalog.PoliciesAPI: - """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies in Unity Catalog.""" - return self._policies - @property def policy_compliance_for_clusters(self) -> pkg_compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" @@ -790,6 +767,11 @@ def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy + @property + def query_execution(self) -> pkg_dashboards.QueryExecutionAPI: + """Query execution APIs for AI / BI Dashboards.""" + return self._query_execution + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" @@ -835,16 +817,16 @@ def repos(self) -> pkg_workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos + @property + def request_for_access(self) -> pkg_catalog.RequestForAccessAPI: + """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.""" + return self._request_for_access + @property def 
resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI:
         """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
         return self._resource_quotas
 
-    @property
-    def rfa(self) -> pkg_catalog.RfaAPI:
-        """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables."""
-        return self._rfa
-
     @property
     def schemas(self) -> pkg_catalog.SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
@@ -861,9 +843,9 @@ def service_principal_secrets_proxy(self) -> pkg_oauth2.ServicePrincipalSecretsP
         return self._service_principal_secrets_proxy
 
     @property
-    def service_principals_v2(self) -> pkg_iam.ServicePrincipalsV2API:
+    def service_principals(self) -> pkg_iam.ServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
-        return self._service_principals_v2
+        return self._service_principals
 
     @property
     def serving_endpoints(self) -> ServingEndpointsExt:
@@ -911,18 +893,18 @@ def tables(self) -> pkg_catalog.TablesAPI:
         return self._tables
 
     @property
-    def tag_policies(self) -> pkg_tags.TagPoliciesAPI:
-        """The Tag Policy API allows you to manage policies for governed tags in Databricks."""
-        return self._tag_policies
+    def tag_assignments(self) -> pkg_tags.TagAssignmentsAPI:
+        """Manage tag assignments on workspace-scoped objects."""
+        return self._tag_assignments
 
     @property
-    def temporary_path_credentials(self) -> pkg_catalog.TemporaryPathCredentialsAPI:
-        """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud storage locations registered in Databricks."""
-        return self._temporary_path_credentials
+    def tag_policies(self) -> pkg_tags.TagPoliciesAPI:
+        """The Tag Policy API allows you to manage tag policies in Databricks."""
+        return self._tag_policies
 
     @property
     def temporary_table_credentials(self) -> pkg_catalog.TemporaryTableCredentialsAPI:
         """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks."""
         return self._temporary_table_credentials
 
     @property
@@ -936,9 +918,9 @@ def tokens(self) -> pkg_settings.TokensAPI:
         return self._tokens
 
     @property
-    def users_v2(self) -> pkg_iam.UsersV2API:
+    def users(self) -> pkg_iam.UsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
-        return self._users_v2
+        return self._users
 
     @property
     def vector_search_endpoints(self) -> pkg_vectorsearch.VectorSearchEndpointsAPI:
@@ -990,21 +972,6 @@ def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API:
         """These APIs are used to manage identities and the workspace access of these identities in ."""
         return self._workspace_iam_v2
 
-    @property
-    def groups(self) -> pkg_iam.GroupsAPI:
-        """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
-        return self._groups
-
-    @property
-    def service_principals(self) -> pkg_iam.ServicePrincipalsAPI:
-        """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
-        return self._service_principals
-
-    @property
-    def users(self) -> pkg_iam.UsersAPI:
-        """User identities 
recognized by Databricks and represented by email addresses.""" - return self._users - def get_workspace_id(self) -> int: """Get the workspace ID of the workspace that this client is connected to.""" response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"]) @@ -1086,7 +1053,7 @@ def __init__( self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client) self._encryption_keys = pkg_provisioning.EncryptionKeysAPI(self._api_client) self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client) - self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client) + self._groups = pkg_iam.AccountGroupsAPI(self._api_client) self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client) self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client) self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client) @@ -1099,22 +1066,19 @@ def __init__( self._published_app_integration = pkg_oauth2.PublishedAppIntegrationAPI(self._api_client) self._service_principal_federation_policy = pkg_oauth2.ServicePrincipalFederationPolicyAPI(self._api_client) self._service_principal_secrets = pkg_oauth2.ServicePrincipalSecretsAPI(self._api_client) - self._service_principals_v2 = pkg_iam.AccountServicePrincipalsV2API(self._api_client) + self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) self._settings = pkg_settings.AccountSettingsAPI(self._api_client) self._settings_v2 = pkg_settingsv2.AccountSettingsV2API(self._api_client) self._storage = pkg_provisioning.StorageAPI(self._api_client) self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client) self._usage_dashboards = pkg_billing.UsageDashboardsAPI(self._api_client) - self._users_v2 = pkg_iam.AccountUsersV2API(self._api_client) + self._users = pkg_iam.AccountUsersAPI(self._api_client) self._vpc_endpoints = pkg_provisioning.VpcEndpointsAPI(self._api_client) self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client) self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client) self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client) self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) self._budgets = pkg_billing.BudgetsAPI(self._api_client) - self._groups = pkg_iam.AccountGroupsAPI(self._api_client) - self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) - self._users = pkg_iam.AccountUsersAPI(self._api_client) @property def config(self) -> client.Config: @@ -1160,9 +1124,9 @@ def federation_policy(self) -> pkg_oauth2.AccountFederationPolicyAPI: return self._federation_policy @property - def groups_v2(self) -> pkg_iam.AccountGroupsV2API: + def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" - return self._groups_v2 + return self._groups @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: @@ -1225,9 +1189,9 @@ def service_principal_secrets(self) -> pkg_oauth2.ServicePrincipalSecretsAPI: return self._service_principal_secrets @property - def service_principals_v2(self) -> pkg_iam.AccountServicePrincipalsV2API: + def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" - return self._service_principals_v2 
+        return self._service_principals
 
     @property
     def settings(self) -> pkg_settings.AccountSettingsAPI:
@@ -1255,9 +1219,9 @@ def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI:
         return self._usage_dashboards
 
     @property
-    def users_v2(self) -> pkg_iam.AccountUsersV2API:
+    def users(self) -> pkg_iam.AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
-        return self._users_v2
+        return self._users
 
     @property
     def vpc_endpoints(self) -> pkg_provisioning.VpcEndpointsAPI:
@@ -1289,21 +1253,6 @@ def budgets(self) -> pkg_billing.BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
 
-    @property
-    def groups(self) -> pkg_iam.AccountGroupsAPI:
-        """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
-        return self._groups
-
-    @property
-    def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI:
-        """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
-        return self._service_principals
-
-    @property
-    def users(self) -> pkg_iam.AccountUsersAPI:
-        """User identities recognized by Databricks and represented by email addresses."""
-        return self._users
-
     def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
         """Constructs a ``WorkspaceClient`` for the given workspace.
 
diff --git a/databricks/sdk/service/agentbricks.py b/databricks/sdk/service/agentbricks.py
index 25175acf0..8cda7ac26 100755
--- a/databricks/sdk/service/agentbricks.py
+++ b/databricks/sdk/service/agentbricks.py
@@ -227,9 +227,9 @@ def create_custom_llm(
     :param instructions: str
       Instructions for the custom LLM to follow
     :param agent_artifact_path: str (optional)
-      This will soon be deprecated!! Optional: UC path for agent artifacts. If you are using a dataset
-      that you only have read permissions, please provide a destination path where you have write
-      permissions. Please provide this in catalog.schema format.
+      Optional: UC path for agent artifacts. If you are using a dataset for which you only have read
+      permissions, please provide a destination path where you have write permissions. Please provide this
+      in catalog.schema format.
    :param datasets: List[:class:`Dataset`] (optional)
      Datasets used for training and evaluating the model, not for inference. Currently, only 1 dataset
      is accepted.
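
Note: across the workspace- and account-level clients above, the identity accessors drop their `_v2` suffixes, so callers rename attribute accesses rather than change call signatures. A minimal migration sketch, assuming a configured Databricks auth environment (the listed fields are illustrative):

    from databricks.sdk import AccountClient, WorkspaceClient

    w = WorkspaceClient()
    for group in w.groups.list():  # was: w.groups_v2.list()
        print(group.display_name)
    # likewise, w.rfa is now w.request_for_access

    a = AccountClient()
    for user in a.users.list():  # was: a.users_v2.list()
        print(user.user_name)
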
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index aeedb7146..99d60a4dd 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -32,6 +32,8 @@ class App: app_status: Optional[ApplicationStatus] = None budget_policy_id: Optional[str] = None + """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the + new usage_policy_id is properly populated and used.""" compute_status: Optional[ComputeStatus] = None @@ -49,6 +51,10 @@ class App: """The description of the app.""" effective_budget_policy_id: Optional[str] = None + """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the + new usage_policy_id is properly populated and used.""" + + effective_usage_policy_id: Optional[str] = None effective_user_api_scopes: Optional[List[str]] = None """The effective api scopes granted to the user access token.""" @@ -82,6 +88,8 @@ class App: url: Optional[str] = None """The URL of the app once it is deployed.""" + usage_policy_id: Optional[str] = None + user_api_scopes: Optional[List[str]] = None def as_dict(self) -> dict: @@ -105,6 +113,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.effective_user_api_scopes: body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes] if self.id is not None: @@ -131,6 +141,8 @@ def as_dict(self) -> dict: body["updater"] = self.updater if self.url is not None: body["url"] = self.url + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.user_api_scopes: body["user_api_scopes"] = [v for v in self.user_api_scopes] return body @@ -156,6 +168,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.effective_user_api_scopes: body["effective_user_api_scopes"] = self.effective_user_api_scopes if self.id is not None: @@ -182,6 +196,8 @@ def as_shallow_dict(self) -> dict: body["updater"] = self.updater if self.url is not None: body["url"] = self.url + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.user_api_scopes: body["user_api_scopes"] = self.user_api_scopes return body @@ -199,6 +215,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: default_source_code_path=d.get("default_source_code_path", None), description=d.get("description", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), effective_user_api_scopes=d.get("effective_user_api_scopes", None), id=d.get("id", None), name=d.get("name", None), @@ -212,6 +229,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App: update_time=d.get("update_time", None), updater=d.get("updater", None), url=d.get("url", None), + usage_policy_id=d.get("usage_policy_id", None), user_api_scopes=d.get("user_api_scopes", None), ) @@ -483,312 +501,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentStatus: return cls(message=d.get("message", None), state=_enum(d, "state", 
AppDeploymentState)) -@dataclass -class AppManifest: - """App manifest definition""" - - version: int - """The manifest schema version, for now only 1 is allowed""" - - name: str - """Name of the app defined by manifest author / publisher""" - - description: Optional[str] = None - """Description of the app defined by manifest author / publisher""" - - resource_specs: Optional[List[AppManifestAppResourceSpec]] = None - - def as_dict(self) -> dict: - """Serializes the AppManifest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.resource_specs: - body["resource_specs"] = [v.as_dict() for v in self.resource_specs] - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.resource_specs: - body["resource_specs"] = self.resource_specs - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifest: - """Deserializes the AppManifest from a dictionary.""" - return cls( - description=d.get("description", None), - name=d.get("name", None), - resource_specs=_repeated_dict(d, "resource_specs", AppManifestAppResourceSpec), - version=d.get("version", None), - ) - - -@dataclass -class AppManifestAppResourceJobSpec: - permission: AppManifestAppResourceJobSpecJobPermission - """Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", - "CAN_MANAGE_RUN", "CAN_VIEW".""" - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceJobSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceJobSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceJobSpec: - """Deserializes the AppManifestAppResourceJobSpec from a dictionary.""" - return cls(permission=_enum(d, "permission", AppManifestAppResourceJobSpecJobPermission)) - - -class AppManifestAppResourceJobSpecJobPermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_RUN = "CAN_MANAGE_RUN" - CAN_VIEW = "CAN_VIEW" - IS_OWNER = "IS_OWNER" - - -@dataclass -class AppManifestAppResourceSecretSpec: - permission: AppManifestAppResourceSecretSpecSecretPermission - """Permission to grant on the secret scope. For secrets, only one permission is allowed. 
Permission - must be one of: "READ", "WRITE", "MANAGE".""" - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceSecretSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceSecretSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSecretSpec: - """Deserializes the AppManifestAppResourceSecretSpec from a dictionary.""" - return cls(permission=_enum(d, "permission", AppManifestAppResourceSecretSpecSecretPermission)) - - -class AppManifestAppResourceSecretSpecSecretPermission(Enum): - """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE".""" - - MANAGE = "MANAGE" - READ = "READ" - WRITE = "WRITE" - - -@dataclass -class AppManifestAppResourceServingEndpointSpec: - permission: AppManifestAppResourceServingEndpointSpecServingEndpointPermission - """Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", - "CAN_QUERY", "CAN_VIEW".""" - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceServingEndpointSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceServingEndpointSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceServingEndpointSpec: - """Deserializes the AppManifestAppResourceServingEndpointSpec from a dictionary.""" - return cls( - permission=_enum(d, "permission", AppManifestAppResourceServingEndpointSpecServingEndpointPermission) - ) - - -class AppManifestAppResourceServingEndpointSpecServingEndpointPermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_QUERY = "CAN_QUERY" - CAN_VIEW = "CAN_VIEW" - - -@dataclass -class AppManifestAppResourceSpec: - """AppResource related fields are copied from app.proto but excludes resource identifiers (e.g. 
- name, id, key, scope, etc.)""" - - name: str - """Name of the App Resource.""" - - description: Optional[str] = None - """Description of the App Resource.""" - - job_spec: Optional[AppManifestAppResourceJobSpec] = None - - secret_spec: Optional[AppManifestAppResourceSecretSpec] = None - - serving_endpoint_spec: Optional[AppManifestAppResourceServingEndpointSpec] = None - - sql_warehouse_spec: Optional[AppManifestAppResourceSqlWarehouseSpec] = None - - uc_securable_spec: Optional[AppManifestAppResourceUcSecurableSpec] = None - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.name is not None: - body["name"] = self.name - if self.secret_spec: - body["secret_spec"] = self.secret_spec.as_dict() - if self.serving_endpoint_spec: - body["serving_endpoint_spec"] = self.serving_endpoint_spec.as_dict() - if self.sql_warehouse_spec: - body["sql_warehouse_spec"] = self.sql_warehouse_spec.as_dict() - if self.uc_securable_spec: - body["uc_securable_spec"] = self.uc_securable_spec.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.job_spec: - body["job_spec"] = self.job_spec - if self.name is not None: - body["name"] = self.name - if self.secret_spec: - body["secret_spec"] = self.secret_spec - if self.serving_endpoint_spec: - body["serving_endpoint_spec"] = self.serving_endpoint_spec - if self.sql_warehouse_spec: - body["sql_warehouse_spec"] = self.sql_warehouse_spec - if self.uc_securable_spec: - body["uc_securable_spec"] = self.uc_securable_spec - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSpec: - """Deserializes the AppManifestAppResourceSpec from a dictionary.""" - return cls( - description=d.get("description", None), - job_spec=_from_dict(d, "job_spec", AppManifestAppResourceJobSpec), - name=d.get("name", None), - secret_spec=_from_dict(d, "secret_spec", AppManifestAppResourceSecretSpec), - serving_endpoint_spec=_from_dict(d, "serving_endpoint_spec", AppManifestAppResourceServingEndpointSpec), - sql_warehouse_spec=_from_dict(d, "sql_warehouse_spec", AppManifestAppResourceSqlWarehouseSpec), - uc_securable_spec=_from_dict(d, "uc_securable_spec", AppManifestAppResourceUcSecurableSpec), - ) - - -@dataclass -class AppManifestAppResourceSqlWarehouseSpec: - permission: AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission - """Permission to grant on the SQL warehouse. 
Supported permissions are: "CAN_MANAGE", "CAN_USE", - "IS_OWNER".""" - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceSqlWarehouseSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceSqlWarehouseSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSqlWarehouseSpec: - """Deserializes the AppManifestAppResourceSqlWarehouseSpec from a dictionary.""" - return cls(permission=_enum(d, "permission", AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission)) - - -class AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_USE = "CAN_USE" - IS_OWNER = "IS_OWNER" - - -@dataclass -class AppManifestAppResourceUcSecurableSpec: - securable_type: AppManifestAppResourceUcSecurableSpecUcSecurableType - - permission: AppManifestAppResourceUcSecurableSpecUcSecurablePermission - - def as_dict(self) -> dict: - """Serializes the AppManifestAppResourceUcSecurableSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.securable_type is not None: - body["securable_type"] = self.securable_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppManifestAppResourceUcSecurableSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.securable_type is not None: - body["securable_type"] = self.securable_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceUcSecurableSpec: - """Deserializes the AppManifestAppResourceUcSecurableSpec from a dictionary.""" - return cls( - permission=_enum(d, "permission", AppManifestAppResourceUcSecurableSpecUcSecurablePermission), - securable_type=_enum(d, "securable_type", AppManifestAppResourceUcSecurableSpecUcSecurableType), - ) - - -class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum): - - MANAGE = "MANAGE" - READ_VOLUME = "READ_VOLUME" - WRITE_VOLUME = "WRITE_VOLUME" - - -class AppManifestAppResourceUcSecurableSpecUcSecurableType(Enum): - - VOLUME = "VOLUME" - - @dataclass class AppPermission: inherited: Optional[bool] = None @@ -1342,81 +1054,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ComputeStatus: return cls(message=d.get("message", None), state=_enum(d, "state", ComputeState)) -@dataclass -class CustomTemplate: - name: str - """The name of the template. It must contain only alphanumeric characters, hyphens, underscores, - and whitespaces. It must be unique within the workspace.""" - - git_repo: str - """The Git repository URL that the template resides in.""" - - path: str - """The path to the template within the Git repository.""" - - manifest: AppManifest - """The manifest of the template. 
It defines fields and default values when installing the template.""" - - git_provider: str - """The Git provider of the template.""" - - creator: Optional[str] = None - - description: Optional[str] = None - """The description of the template.""" - - def as_dict(self) -> dict: - """Serializes the CustomTemplate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.description is not None: - body["description"] = self.description - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_repo is not None: - body["git_repo"] = self.git_repo - if self.manifest: - body["manifest"] = self.manifest.as_dict() - if self.name is not None: - body["name"] = self.name - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CustomTemplate into a shallow dictionary of its immediate attributes.""" - body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.description is not None: - body["description"] = self.description - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_repo is not None: - body["git_repo"] = self.git_repo - if self.manifest: - body["manifest"] = self.manifest - if self.name is not None: - body["name"] = self.name - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CustomTemplate: - """Deserializes the CustomTemplate from a dictionary.""" - return cls( - creator=d.get("creator", None), - description=d.get("description", None), - git_provider=d.get("git_provider", None), - git_repo=d.get("git_repo", None), - manifest=_from_dict(d, "manifest", AppManifest), - name=d.get("name", None), - path=d.get("path", None), - ) - - @dataclass class GetAppPermissionLevelsResponse: permission_levels: Optional[List[AppPermissionsDescription]] = None @@ -1508,39 +1145,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) -@dataclass -class ListCustomTemplatesResponse: - next_page_token: Optional[str] = None - """Pagination token to request the next page of custom templates.""" - - templates: Optional[List[CustomTemplate]] = None - - def as_dict(self) -> dict: - """Serializes the ListCustomTemplatesResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.templates: - body["templates"] = [v.as_dict() for v in self.templates] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListCustomTemplatesResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.templates: - body["templates"] = self.templates - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListCustomTemplatesResponse: - """Deserializes the ListCustomTemplatesResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), templates=_repeated_dict(d, "templates", CustomTemplate) - ) - - class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -1956,108 +1560,3 
@@ def update_permissions( res = self._api.do("PATCH", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) return AppPermissions.from_dict(res) - - -class AppsSettingsAPI: - """Apps Settings manage the settings for the Apps service on a customer's Databricks instance.""" - - def __init__(self, api_client): - self._api = api_client - - def create_custom_template(self, template: CustomTemplate) -> CustomTemplate: - """Creates a custom template. - - :param template: :class:`CustomTemplate` - - :returns: :class:`CustomTemplate` - """ - body = template.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/apps-settings/templates", body=body, headers=headers) - return CustomTemplate.from_dict(res) - - def delete_custom_template(self, name: str) -> CustomTemplate: - """Deletes the custom template with the specified name. - - :param name: str - The name of the custom template. - - :returns: :class:`CustomTemplate` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("DELETE", f"/api/2.0/apps-settings/templates/{name}", headers=headers) - return CustomTemplate.from_dict(res) - - def get_custom_template(self, name: str) -> CustomTemplate: - """Gets the custom template with the specified name. - - :param name: str - The name of the custom template. - - :returns: :class:`CustomTemplate` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/apps-settings/templates/{name}", headers=headers) - return CustomTemplate.from_dict(res) - - def list_custom_templates( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[CustomTemplate]: - """Lists all custom templates in the workspace. - - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of custom templates. Requests first page if absent. - - :returns: Iterator over :class:`CustomTemplate` - """ - - query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do("GET", "/api/2.0/apps-settings/templates", query=query, headers=headers) - if "templates" in json: - for v in json["templates"]: - yield CustomTemplate.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_custom_template(self, name: str, template: CustomTemplate) -> CustomTemplate: - """Updates the custom template with the specified name. Note that the template name cannot be updated. - - :param name: str - The name of the template. It must contain only alphanumeric characters, hyphens, underscores, and - whitespaces. It must be unique within the workspace. 
- :param template: :class:`CustomTemplate` - - :returns: :class:`CustomTemplate` - """ - body = template.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/apps-settings/templates/{name}", body=body, headers=headers) - return CustomTemplate.from_dict(res) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 3ac709a89..550e1103c 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1207,55 +1207,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnMask: return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None)) -@dataclass -class ColumnMaskOptions: - function_name: str - """The fully qualified name of the column mask function. The function is called on each row of the - target table. The function's first argument and its return type should match the type of the - masked column. Required on create and update.""" - - on_column: str - """The alias of the column to be masked. The alias must refer to one of matched columns. The values - of the column is passed to the column mask function as the first argument. Required on create - and update.""" - - using: Optional[List[FunctionArgument]] = None - """Optional list of column aliases or constant literals to be passed as additional arguments to the - column mask function. The type of each column should match the positional argument of the column - mask function.""" - - def as_dict(self) -> dict: - """Serializes the ColumnMaskOptions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.on_column is not None: - body["on_column"] = self.on_column - if self.using: - body["using"] = [v.as_dict() for v in self.using] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ColumnMaskOptions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.on_column is not None: - body["on_column"] = self.on_column - if self.using: - body["using"] = self.using - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ColumnMaskOptions: - """Deserializes the ColumnMaskOptions from a dictionary.""" - return cls( - function_name=d.get("function_name", None), - on_column=d.get("on_column", None), - using=_repeated_dict(d, "using", FunctionArgument), - ) - - @dataclass class ColumnRelationship: source: Optional[str] = None @@ -1363,6 +1314,9 @@ class ConnectionInfo: credential_type: Optional[CredentialType] = None """The type of credential.""" + environment_settings: Optional[EnvironmentSettings] = None + """[Create,Update:OPT] Connection environment settings as EnvironmentSettings object.""" + full_name: Optional[str] = None """Full name of connection.""" @@ -1412,6 +1366,8 @@ def as_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type.value + if self.environment_settings: + body["environment_settings"] = self.environment_settings.as_dict() if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1453,6 +1409,8 @@ def as_shallow_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type + if 
self.environment_settings:
+            body["environment_settings"] = self.environment_settings
         if self.full_name is not None:
             body["full_name"] = self.full_name
         if self.metastore_id is not None:
@@ -1489,6 +1447,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo:
             created_at=d.get("created_at", None),
             created_by=d.get("created_by", None),
             credential_type=_enum(d, "credential_type", CredentialType),
+            environment_settings=_from_dict(d, "environment_settings", EnvironmentSettings),
             full_name=d.get("full_name", None),
             metastore_id=d.get("metastore_id", None),
             name=d.get("name", None),
@@ -1505,7 +1464,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo:
 
 
 class ConnectionType(Enum):
-    """Next Id: 38"""
+    """Next Id: 37"""
 
     BIGQUERY = "BIGQUERY"
     DATABRICKS = "DATABRICKS"
@@ -1515,7 +1474,6 @@ class ConnectionType(Enum):
     HTTP = "HTTP"
     MYSQL = "MYSQL"
     ORACLE = "ORACLE"
-    PALANTIR = "PALANTIR"
     POSTGRESQL = "POSTGRESQL"
     POWER_BI = "POWER_BI"
     REDSHIFT = "REDSHIFT"
@@ -2576,24 +2534,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteMonitorResponse:
         return cls()
 
 
-@dataclass
-class DeletePolicyResponse:
-    def as_dict(self) -> dict:
-        """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse:
-        """Deserializes the DeletePolicyResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class DeleteRequestExternalLineage:
     source: ExternalLineageObject
@@ -2893,6 +2833,8 @@ def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag:
 
 
 class EffectivePredictiveOptimizationFlagInheritedFromType(Enum):
+    """The type of the object from which the flag was inherited. If there was no inheritance, this
+    field is left blank."""
 
     CATALOG = "CATALOG"
     SCHEMA = "SCHEMA"
@@ -3032,15 +2974,16 @@ class EntityTagAssignment:
     """Represents a tag assignment to an entity"""
 
     entity_name: str
-    """The fully qualified name of the entity to which the tag is assigned"""
+    """Required. The fully qualified structured name of the entity to which the tag is assigned. The
+    entity name should follow the format entity_type/fully_qualified_entity_name, e.g.
+    catalogs/my_catalog, schemas/my_catalog.my_schema,
+    columns/my_catalog.my_schema.my_table.my_column. When a segment contains special
+    characters (e.g. '/'), the whole segment must be wrapped in backticks. For example,
+    columns/catalog.schema.table.\`column/a\`"""
 
     tag_key: str
     """The key of the tag"""
 
-    entity_type: str
-    """The type of the entity to which the tag is assigned. 
Allowed values are: catalogs, schemas, - tables, columns, volumes.""" - tag_value: Optional[str] = None """The value of the tag""" @@ -3049,8 +2992,6 @@ def as_dict(self) -> dict: body = {} if self.entity_name is not None: body["entity_name"] = self.entity_name - if self.entity_type is not None: - body["entity_type"] = self.entity_type if self.tag_key is not None: body["tag_key"] = self.tag_key if self.tag_value is not None: @@ -3062,8 +3003,6 @@ def as_shallow_dict(self) -> dict: body = {} if self.entity_name is not None: body["entity_name"] = self.entity_name - if self.entity_type is not None: - body["entity_type"] = self.entity_type if self.tag_key is not None: body["tag_key"] = self.tag_key if self.tag_value is not None: @@ -3074,10 +3013,39 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EntityTagAssignment: """Deserializes the EntityTagAssignment from a dictionary.""" return cls( - entity_name=d.get("entity_name", None), - entity_type=d.get("entity_type", None), - tag_key=d.get("tag_key", None), - tag_value=d.get("tag_value", None), + entity_name=d.get("entity_name", None), tag_key=d.get("tag_key", None), tag_value=d.get("tag_value", None) + ) + + +@dataclass +class EnvironmentSettings: + environment_version: Optional[str] = None + + java_dependencies: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the EnvironmentSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = [v for v in self.java_dependencies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EnvironmentSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = self.java_dependencies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EnvironmentSettings: + """Deserializes the EnvironmentSettings from a dictionary.""" + return cls( + environment_version=d.get("environment_version", None), java_dependencies=d.get("java_dependencies", None) ) @@ -3651,8 +3619,7 @@ class ExternalLocationInfo: sufficient.""" file_event_queue: Optional[FileEventQueue] = None - """File event queue settings. 
If `enable_file_events` is `true`, must be defined and have exactly - one of the documented properties.""" + """File event queue settings.""" isolation_mode: Optional[IsolationMode] = None @@ -4076,38 +4043,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: ) -@dataclass -class FunctionArgument: - alias: Optional[str] = None - """The alias of a matched column.""" - - constant: Optional[str] = None - """A constant literal.""" - - def as_dict(self) -> dict: - """Serializes the FunctionArgument into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.constant is not None: - body["constant"] = self.constant - return body - - def as_shallow_dict(self) -> dict: - """Serializes the FunctionArgument into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.constant is not None: - body["constant"] = self.constant - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionArgument: - """Deserializes the FunctionArgument from a dictionary.""" - return cls(alias=d.get("alias", None), constant=d.get("constant", None)) - - @dataclass class FunctionDependency: """A function that is dependent on a SQL object.""" @@ -4640,77 +4575,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: ) -@dataclass -class GenerateTemporaryPathCredentialResponse: - aws_temp_credentials: Optional[AwsCredentials] = None - - azure_aad: Optional[AzureActiveDirectoryToken] = None - - azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None - - expiration_time: Optional[int] = None - """Server time when the credential will expire, in epoch milliseconds. The API client is advised to - cache the credential given this expiration time.""" - - gcp_oauth_token: Optional[GcpOauthToken] = None - - r2_temp_credentials: Optional[R2Credentials] = None - - url: Optional[str] = None - """The URL of the storage path accessible by the temporary credential.""" - - def as_dict(self) -> dict: - """Serializes the GenerateTemporaryPathCredentialResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() - if self.azure_aad: - body["azure_aad"] = self.azure_aad.as_dict() - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() - if self.r2_temp_credentials: - body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryPathCredentialResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials - if self.azure_aad: - body["azure_aad"] = self.azure_aad - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token - if self.r2_temp_credentials: - body["r2_temp_credentials"] = self.r2_temp_credentials - if self.url is not None: - 
body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryPathCredentialResponse: - """Deserializes the GenerateTemporaryPathCredentialResponse from a dictionary.""" - return cls( - aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), - azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), - azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), - expiration_time=d.get("expiration_time", None), - gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), - r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), - url=d.get("url", None), - ) - - @dataclass class GenerateTemporaryServiceCredentialAzureOptions: """The Azure cloud options to customize the requested temporary credential""" @@ -5546,39 +5410,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: ) -@dataclass -class ListPoliciesResponse: - next_page_token: Optional[str] = None - """Optional opaque token for continuing pagination. `page_token` should be set to this value for - the next request to retrieve the next page of results.""" - - policies: Optional[List[PolicyInfo]] = None - """The list of retrieved policies.""" - - def as_dict(self) -> dict: - """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = [v.as_dict() for v in self.policies] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = self.policies - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse: - """Deserializes the ListPoliciesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", PolicyInfo)) - - @dataclass class ListQuotasResponse: next_page_token: Optional[str] = None @@ -5849,38 +5680,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) -@dataclass -class MatchColumn: - alias: Optional[str] = None - """Optional alias of the matched column.""" - - condition: Optional[str] = None - """The condition expression used to match a table column.""" - - def as_dict(self) -> dict: - """Serializes the MatchColumn into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.condition is not None: - body["condition"] = self.condition - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MatchColumn into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.condition is not None: - body["condition"] = self.condition - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MatchColumn: - """Deserializes the MatchColumn from a dictionary.""" - return cls(alias=d.get("alias", None), condition=d.get("condition", None)) - - class MatchType(Enum): """The artifact pattern matching type""" @@ -7452,13 +7251,6 @@ class OptionSpecOptionType(Enum): OPTION_STRING 
= "OPTION_STRING" -class PathOperation(Enum): - - PATH_CREATE_TABLE = "PATH_CREATE_TABLE" - PATH_READ = "PATH_READ" - PATH_READ_WRITE = "PATH_READ_WRITE" - - @dataclass class PermissionsChange: add: Optional[List[Privilege]] = None @@ -7468,6 +7260,15 @@ class PermissionsChange: """The principal whose privileges we are changing. Only one of principal or principal_id should be specified, never both at the same time.""" + principal_id: Optional[int] = None + """An opaque internal ID that identifies the principal whose privileges should be removed. + + This field is intended for removing privileges associated with a deleted user. When set, only + the entries specified in the remove field are processed; any entries in the add field will be + rejected. + + Only one of principal or principal_id should be specified, never both at the same time.""" + remove: Optional[List[Privilege]] = None """The set of privileges to remove.""" @@ -7478,6 +7279,8 @@ def as_dict(self) -> dict: body["add"] = [v.value for v in self.add] if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = [v.value for v in self.remove] return body @@ -7489,6 +7292,8 @@ def as_shallow_dict(self) -> dict: body["add"] = self.add if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.remove: body["remove"] = self.remove return body @@ -7499,6 +7304,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: return cls( add=_repeated_enum(d, "add", Privilege), principal=d.get("principal", None), + principal_id=d.get("principal_id", None), remove=_repeated_enum(d, "remove", Privilege), ) @@ -7566,220 +7372,48 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: @dataclass -class PolicyInfo: - to_principals: List[str] - """List of user or group names that the policy applies to. Required on create and optional on - update.""" - - for_securable_type: SecurableType - """Type of securables that the policy should take effect on. Only `TABLE` is supported at this - moment. Required on create and optional on update.""" - - policy_type: PolicyType - """Type of the policy. Required on create and ignored on update.""" - - column_mask: Optional[ColumnMaskOptions] = None - """Options for column mask policies. Valid only if `policy_type` is `POLICY_TYPE_COLUMN_MASK`. - Required on create and optional on update. When specified on update, the new options will - replace the existing options as a whole.""" - - comment: Optional[str] = None - """Optional description of the policy.""" - - created_at: Optional[int] = None - """Time at which the policy was created, in epoch milliseconds. Output only.""" - - created_by: Optional[str] = None - """Username of the user who created the policy. Output only.""" - - except_principals: Optional[List[str]] = None - """Optional list of user or group names that should be excluded from the policy.""" - - id: Optional[str] = None - """Unique identifier of the policy. This field is output only and is generated by the system.""" - - match_columns: Optional[List[MatchColumn]] = None - """Optional list of condition expressions used to match table columns. Only valid when - `for_securable_type` is `TABLE`. When specified, the policy only applies to tables whose columns - satisfy all match conditions.""" - - name: Optional[str] = None - """Name of the policy. 
Required on create and optional on update. To rename the policy, set `name` - to a different value on update.""" - - on_securable_fullname: Optional[str] = None - """Full name of the securable on which the policy is defined. Required on create and ignored on - update.""" - - on_securable_type: Optional[SecurableType] = None - """Type of the securable on which the policy is defined. Only `CATALOG`, `SCHEMA` and `TABLE` are - supported at this moment. Required on create and ignored on update.""" - - row_filter: Optional[RowFilterOptions] = None - """Options for row filter policies. Valid only if `policy_type` is `POLICY_TYPE_ROW_FILTER`. - Required on create and optional on update. When specified on update, the new options will - replace the existing options as a whole.""" +class PrimaryKeyConstraint: + name: str + """The name of the constraint.""" - updated_at: Optional[int] = None - """Time at which the policy was last modified, in epoch milliseconds. Output only.""" + child_columns: List[str] + """Column names for this constraint.""" - updated_by: Optional[str] = None - """Username of the user who last modified the policy. Output only.""" + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" - when_condition: Optional[str] = None - """Optional condition when the policy should take effect.""" + timeseries_columns: Optional[List[str]] = None + """Column names that represent a timeseries.""" def as_dict(self) -> dict: - """Serializes the PolicyInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column_mask: - body["column_mask"] = self.column_mask.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.except_principals: - body["except_principals"] = [v for v in self.except_principals] - if self.for_securable_type is not None: - body["for_securable_type"] = self.for_securable_type.value - if self.id is not None: - body["id"] = self.id - if self.match_columns: - body["match_columns"] = [v.as_dict() for v in self.match_columns] + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] if self.name is not None: body["name"] = self.name - if self.on_securable_fullname is not None: - body["on_securable_fullname"] = self.on_securable_fullname - if self.on_securable_type is not None: - body["on_securable_type"] = self.on_securable_type.value - if self.policy_type is not None: - body["policy_type"] = self.policy_type.value - if self.row_filter: - body["row_filter"] = self.row_filter.as_dict() - if self.to_principals: - body["to_principals"] = [v for v in self.to_principals] - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.when_condition is not None: - body["when_condition"] = self.when_condition + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + body["timeseries_columns"] = [v for v in self.timeseries_columns] return body def as_shallow_dict(self) -> dict: - """Serializes the PolicyInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.column_mask: - 
body["column_mask"] = self.column_mask - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.except_principals: - body["except_principals"] = self.except_principals - if self.for_securable_type is not None: - body["for_securable_type"] = self.for_securable_type - if self.id is not None: - body["id"] = self.id - if self.match_columns: - body["match_columns"] = self.match_columns + if self.child_columns: + body["child_columns"] = self.child_columns if self.name is not None: body["name"] = self.name - if self.on_securable_fullname is not None: - body["on_securable_fullname"] = self.on_securable_fullname - if self.on_securable_type is not None: - body["on_securable_type"] = self.on_securable_type - if self.policy_type is not None: - body["policy_type"] = self.policy_type - if self.row_filter: - body["row_filter"] = self.row_filter - if self.to_principals: - body["to_principals"] = self.to_principals - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.when_condition is not None: - body["when_condition"] = self.when_condition + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + body["timeseries_columns"] = self.timeseries_columns return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: - """Deserializes the PolicyInfo from a dictionary.""" - return cls( - column_mask=_from_dict(d, "column_mask", ColumnMaskOptions), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - except_principals=d.get("except_principals", None), - for_securable_type=_enum(d, "for_securable_type", SecurableType), - id=d.get("id", None), - match_columns=_repeated_dict(d, "match_columns", MatchColumn), - name=d.get("name", None), - on_securable_fullname=d.get("on_securable_fullname", None), - on_securable_type=_enum(d, "on_securable_type", SecurableType), - policy_type=_enum(d, "policy_type", PolicyType), - row_filter=_from_dict(d, "row_filter", RowFilterOptions), - to_principals=d.get("to_principals", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - when_condition=d.get("when_condition", None), - ) - - -class PolicyType(Enum): - - POLICY_TYPE_COLUMN_MASK = "POLICY_TYPE_COLUMN_MASK" - POLICY_TYPE_ROW_FILTER = "POLICY_TYPE_ROW_FILTER" - - -@dataclass -class PrimaryKeyConstraint: - name: str - """The name of the constraint.""" - - child_columns: List[str] - """Column names for this constraint.""" - - rely: Optional[bool] = None - """True if the constraint is RELY, false or unset if NORELY.""" - - timeseries_columns: Optional[List[str]] = None - """Column names that represent a timeseries.""" - - def as_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] - if self.name is not None: - body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = [v for v in self.timeseries_columns] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.child_columns: 
- body["child_columns"] = self.child_columns - if self.name is not None: - body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = self.timeseries_columns - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: - """Deserializes the PrimaryKeyConstraint from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: + """Deserializes the PrimaryKeyConstraint from a dictionary.""" return cls( child_columns=d.get("child_columns", None), name=d.get("name", None), @@ -7856,7 +7490,6 @@ class Privilege(Enum): CREATE_VOLUME = "CREATE_VOLUME" EXECUTE = "EXECUTE" EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" - EXTERNAL_USE_SCHEMA = "EXTERNAL_USE_SCHEMA" MANAGE = "MANAGE" MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" MODIFY = "MODIFY" @@ -7886,6 +7519,10 @@ class PrivilegeAssignment: """The principal (user email address or group name). For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -7894,6 +7531,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -7903,6 +7542,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -7910,7 +7551,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -8289,42 +7934,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: ) -@dataclass -class RowFilterOptions: - function_name: str - """The fully qualified name of the row filter function. The function is called on each row of the - target table. It should return a boolean value indicating whether the row should be visible to - the user. Required on create and update.""" - - using: Optional[List[FunctionArgument]] = None - """Optional list of column aliases or constant literals to be passed as arguments to the row filter - function. 
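A short sketch of consuming the paired principal and principal_id fields on PrivilegeAssignment, for example to spot grants whose principal was deleted; the payload below is fabricated:

from databricks.sdk.service.catalog import PrivilegeAssignment

payload = [
    {"principal": "data-engineers", "principal_id": 101, "privileges": ["SELECT"]},
    {"principal_id": 202, "privileges": ["MODIFY"]},  # deleted principal: name is absent
]
assignments = [PrivilegeAssignment.from_dict(p) for p in payload]
orphaned = [a.principal_id for a in assignments if a.principal is None]
assert orphaned == [202]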
The type of each column should match the positional argument of the row filter - function.""" - - def as_dict(self) -> dict: - """Serializes the RowFilterOptions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.using: - body["using"] = [v.as_dict() for v in self.using] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RowFilterOptions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.using: - body["using"] = self.using - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RowFilterOptions: - """Deserializes the RowFilterOptions from a dictionary.""" - return cls(function_name=d.get("function_name", None), using=_repeated_dict(d, "using", FunctionArgument)) - - @dataclass class SchemaInfo: """Next ID: 40""" @@ -8540,7 +8149,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable: class SecurableKind(Enum): - """Latest kind: CONNECTION_SHAREPOINT_OAUTH_M2M = 264; Next id:265""" TABLE_DB_STORAGE = "TABLE_DB_STORAGE" TABLE_DELTA = "TABLE_DELTA" @@ -8551,7 +8159,6 @@ class SecurableKind(Enum): TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED" TABLE_DELTA_UNIFORM_HUDI_EXTERNAL = "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL" TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL = "TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL" - TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_DELTASHARING = "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_DELTASHARING" TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL = ( "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL" ) @@ -8582,7 +8189,6 @@ class SecurableKind(Enum): TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" - TABLE_FOREIGN_PALANTIR = "TABLE_FOREIGN_PALANTIR" TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" @@ -8600,7 +8206,6 @@ class SecurableKind(Enum): TABLE_MATERIALIZED_VIEW = "TABLE_MATERIALIZED_VIEW" TABLE_MATERIALIZED_VIEW_DELTASHARING = "TABLE_MATERIALIZED_VIEW_DELTASHARING" TABLE_METRIC_VIEW = "TABLE_METRIC_VIEW" - TABLE_METRIC_VIEW_DELTASHARING = "TABLE_METRIC_VIEW_DELTASHARING" TABLE_ONLINE_VECTOR_INDEX_DIRECT = "TABLE_ONLINE_VECTOR_INDEX_DIRECT" TABLE_ONLINE_VECTOR_INDEX_REPLICA = "TABLE_ONLINE_VECTOR_INDEX_REPLICA" TABLE_ONLINE_VIEW = "TABLE_ONLINE_VIEW" @@ -8957,7 +8562,7 @@ class SystemSchemaInfo: state: str """The current state of enablement for the system schema. An empty string means the system schema is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | - ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE | MANAGED""" + ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" def as_dict(self) -> dict: """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" @@ -9004,7 +8609,6 @@ class SystemType(Enum): SAP = "SAP" SERVICENOW = "SERVICENOW" SNOWFLAKE = "SNOWFLAKE" - STREAM_NATIVE = "STREAM_NATIVE" TABLEAU = "TABLEAU" TERADATA = "TERADATA" WORKDAY = "WORKDAY" @@ -10251,11 +9855,6 @@ class VolumeInfo: """The unique identifier of the volume""" volume_type: Optional[VolumeType] = None - """The type of the volume. An external volume is located in the specified external location. 
A - managed volume is located in the default location which is specified by the parent schema, or - the parent catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" def as_dict(self) -> dict: """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body.""" @@ -10360,6 +9959,11 @@ def from_dict(cls, d: Dict[str, Any]) -> VolumeInfo: class VolumeType(Enum): + """The type of the volume. An external volume is located in the specified external location. A + managed volume is located in the default location which is specified by the parent schema, or + the parent catalog, or the Metastore. [Learn more] + + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" EXTERNAL = "EXTERNAL" MANAGED = "MANAGED" @@ -11096,6 +10700,7 @@ def create( options: Dict[str, str], *, comment: Optional[str] = None, + environment_settings: Optional[EnvironmentSettings] = None, properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None, ) -> ConnectionInfo: @@ -11112,6 +10717,8 @@ def create( A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param read_only: bool (optional) @@ -11124,6 +10731,8 @@ def create( body["comment"] = comment if connection_type is not None: body["connection_type"] = connection_type.value + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if name is not None: body["name"] = name if options is not None: @@ -11204,7 +10813,13 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = query["page_token"] = json["next_page_token"] def update( - self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None + self, + name: str, + options: Dict[str, str], + *, + environment_settings: Optional[EnvironmentSettings] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, ) -> ConnectionInfo: """Updates the connection that matches the supplied name. @@ -11212,6 +10827,8 @@ def update( Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. :param owner: str (optional) @@ -11220,6 +10837,8 @@ def update( :returns: :class:`ConnectionInfo` """ body = {} + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if new_name is not None: body["new_name"] = new_name if options is not None: @@ -11585,25 +11204,13 @@ def validate_credential( class EntityTagAssignmentsAPI: - """Tags are attributes that include keys and optional values that you can use to organize and categorize - entities in Unity Catalog. Entity tagging is currently supported on catalogs, schemas, tables (including - views), columns, volumes. 
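A usage sketch for the environment_settings argument that connection create and update gain above; the connection name, type, and options are placeholders, not values taken from this patch:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ConnectionType, EnvironmentSettings

w = WorkspaceClient()

conn = w.connections.create(
    name="example-connection",  # hypothetical name
    connection_type=ConnectionType.MYSQL,  # hypothetical connection type
    options={"host": "db.example.com", "port": "3306"},  # hypothetical options
    environment_settings=EnvironmentSettings(environment_version="3"),
)
print(conn.name)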
With these APIs, users can create, update, delete, and list tag assignments
-    across Unity Catalog entities"""
+    """Entity Tag Assignments provide a unified interface for managing tag assignments on Unity Catalog entities."""
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(self, tag_assignment: EntityTagAssignment) -> EntityTagAssignment:
-        """Creates a tag assignment for an Unity Catalog entity.
-
-        To add tags to Unity Catalog entities, you must own the entity or have the following privileges: -
-        **APPLY TAG** on the entity - **USE SCHEMA** on the entity's parent schema - **USE CATALOG** on the
-        entity's parent catalog
-
-        To add a governed tag to Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE**
-        permission on the tag policy. See [Manage tag policy permissions].
-
-        [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions
+        """Create a tag assignment for a Unity Catalog entity.
 
         :param tag_assignment: :class:`EntityTagAssignment`
 
@@ -11618,23 +11225,15 @@ def create(self, tag_assignment: EntityTagAssignment) -> EntityTagAssignment:
         res = self._api.do("POST", "/api/2.1/unity-catalog/entity-tag-assignments", body=body, headers=headers)
         return EntityTagAssignment.from_dict(res)
 
-    def delete(self, entity_type: str, entity_name: str, tag_key: str):
-        """Deletes a tag assignment for an Unity Catalog entity by its key.
-
-        To delete tags from Unity Catalog entities, you must own the entity or have the following privileges:
-        - **APPLY TAG** on the entity - **USE_SCHEMA** on the entity's parent schema - **USE_CATALOG** on the
-        entity's parent catalog
+    def delete(self, entity_name: str, tag_key: str):
+        """Delete a tag assignment for a Unity Catalog entity.
 
-        To delete a governed tag from Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE**
-        permission on the tag policy. See [Manage tag policy permissions].
-
-        [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions
-
-        :param entity_type: str
-            The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables,
-            columns, volumes.
         :param entity_name: str
-            The fully qualified name of the entity to which the tag is assigned
+            Required. The fully qualified structured name of the entity to which the tag is assigned. The entity
+            name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog,
+            schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing
+            segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For
+            example, columns/catalog.schema.table.\`column/a\`
        :param tag_key: str
            Required. The key of the tag to delete
 
@@ -11646,19 +11245,18 @@ def delete(self, entity_type: str, entity_name: str, tag_key: str):
         }
 
         self._api.do(
-            "DELETE",
-            f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}",
-            headers=headers,
+            "DELETE", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", headers=headers
         )
 
-    def get(self, entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssignment:
-        """Gets a tag assignment for an Unity Catalog entity by tag key.
+    def get(self, entity_name: str, tag_key: str) -> EntityTagAssignment:
+        """Get a tag assignment for a Unity Catalog entity.
 
-        :param entity_type: str
-            The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables,
-            columns, volumes.
         :param entity_name: str
-            The fully qualified name of the entity to which the tag is assigned
+            Required. The fully qualified structured name of the entity to which the tag is assigned. The entity
+            name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog,
+            schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing
+            segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For
+            example, columns/catalog.schema.table.\`column/a\`
         :param tag_key: str
             Required. The key of the tag
 
         :returns: :class:`EntityTagAssignment`
         """
 
         headers = {
             "Accept": "application/json",
         }
 
         res = self._api.do(
-            "GET",
-            f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}",
-            headers=headers,
+            "GET", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", headers=headers
         )
         return EntityTagAssignment.from_dict(res)
 
     def list(
-        self, entity_type: str, entity_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None
+        self, entity_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[EntityTagAssignment]:
         """List tag assignments for an Unity Catalog entity
 
-        :param entity_type: str
-            The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables,
-            columns, volumes.
         :param entity_name: str
-            The fully qualified name of the entity to which the tag is assigned
+            Required. The fully qualified structured name of the entity to which the tag is assigned. The entity
+            name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog,
+            schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing
+            segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For
+            example, columns/catalog.schema.table.\`column/a\`
         :param max_results: int (optional)
            Optional. Maximum number of tag assignments to return in a single page
         :param page_token: str (optional)
            Optional. Opaque pagination token to go to next page based on previous query
 
        :returns: Iterator over :class:`EntityTagAssignment`
        """
 
        query = {}
        if max_results is not None:
            query["max_results"] = max_results
        if page_token is not None:
            query["page_token"] = page_token
        headers = {
            "Accept": "application/json",
        }
 
        while True:
            json = self._api.do(
-                "GET",
-                f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags",
-                query=query,
-                headers=headers,
+                "GET", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags", query=query, headers=headers
            )
            if "tag_assignments" in json:
                for v in json["tag_assignments"]:
                    yield EntityTagAssignment.from_dict(v)
            if "next_page_token" not in json or not json["next_page_token"]:
                return
            query["page_token"] = json["next_page_token"]
 
     def update(
-        self, entity_type: str, entity_name: str, tag_key: str, tag_assignment: EntityTagAssignment, update_mask: str
+        self, entity_name: str, tag_key: str, tag_assignment: EntityTagAssignment, update_mask: str
     ) -> EntityTagAssignment:
-        """Updates an existing tag assignment for an Unity Catalog entity.
-
-        To update tags to Unity Catalog entities, you must own the entity or have the following privileges: -
-        **APPLY TAG** on the entity - **USE SCHEMA** on the entity's parent schema - **USE CATALOG** on the
-        entity's parent catalog
+        """Update a tag assignment for a Unity Catalog entity
 
-        To update a governed tag to Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE**
-        permission on the tag policy. See [Manage tag policy permissions].
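To make the structured entity_name convention concrete, a before-and-after sketch, assuming the usual entity_tag_assignments accessor on WorkspaceClient; all identifiers are illustrative:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Before this change: entity_type was a separate positional argument.
# w.entity_tag_assignments.get("columns", "main.sales.orders.region", "cost-center")

# After: the entity type is folded into entity_name as a path prefix.
assignment = w.entity_tag_assignments.get(
    entity_name="columns/main.sales.orders.region",
    tag_key="cost-center",
)
print(assignment.tag_value)

# Segments containing '/' must be wrapped in backticks, per the docstring:
# entity_name="columns/main.sales.orders.`region/emea`"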
- - [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions - - :param entity_type: str - The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, - columns, volumes. :param entity_name: str - The fully qualified name of the entity to which the tag is assigned + Required. The fully qualified structured name of the entity to which the tag is assigned. The entity + name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog, + schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing + segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For + example, columns/catalog.schema.table.\`column/a\` :param tag_key: str The key of the tag :param tag_assignment: :class:`EntityTagAssignment` @@ -11763,7 +11349,7 @@ def update( res = self._api.do( "PATCH", - f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}", + f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", query=query, body=body, headers=headers, @@ -11951,8 +11537,7 @@ def create( enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - File event queue settings. If `enable_file_events` is `true`, must be defined and have exactly one - of the documented properties. + File event queue settings. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -12114,8 +11699,7 @@ def update( enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - File event queue settings. If `enable_file_events` is `true`, must be defined and have exactly one - of the documented properties. + File event queue settings. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) @@ -12494,6 +12078,7 @@ def get( securable_type: str, full_name: str, *, + include_deleted_principals: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None, @@ -12504,6 +12089,8 @@ def get( Type of securable. :param full_name: str Full name of securable. + :param include_deleted_principals: bool (optional) + Optional. If true, also return privilege assignments whose principals have been deleted. :param max_results: int (optional) Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the @@ -12523,6 +12110,8 @@ def get( """ query = {} + if include_deleted_principals is not None: + query["include_deleted_principals"] = include_deleted_principals if max_results is not None: query["max_results"] = max_results if page_token is not None: @@ -13194,185 +12783,6 @@ def get(self, name: str) -> OnlineTable: return OnlineTable.from_dict(res) -class PoliciesAPI: - """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies - in Unity Catalog. 
With ABAC policies, access is controlled in a hierarchical and scalable manner, based on - data attributes rather than specific resources, enabling more flexible and comprehensive access control. - ABAC policies in Unity Catalog support conditions on securable properties, governance tags, and - environment contexts. Callers must have the `MANAGE` privilege on a securable to view, create, update, or - delete ABAC policies.""" - - def __init__(self, api_client): - self._api = api_client - - def create_policy(self, policy_info: PolicyInfo) -> PolicyInfo: - """Creates a new policy on a securable. The new policy applies to the securable and all its descendants. - - :param policy_info: :class:`PolicyInfo` - Required. The policy to create. - - :returns: :class:`PolicyInfo` - """ - body = policy_info.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/policies", body=body, headers=headers) - return PolicyInfo.from_dict(res) - - def delete_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> DeletePolicyResponse: - """Delete an ABAC policy defined on a securable. - - :param on_securable_type: str - Required. The type of the securable to delete the policy from. - :param on_securable_fullname: str - Required. The fully qualified name of the securable to delete the policy from. - :param name: str - Required. The name of the policy to delete - - :returns: :class:`DeletePolicyResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", - headers=headers, - ) - return DeletePolicyResponse.from_dict(res) - - def get_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> PolicyInfo: - """Get the policy definition on a securable - - :param on_securable_type: str - Required. The type of the securable to retrieve the policy for. - :param on_securable_fullname: str - Required. The fully qualified name of securable to retrieve policy for. - :param name: str - Required. The name of the policy to retrieve. - - :returns: :class:`PolicyInfo` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", - headers=headers, - ) - return PolicyInfo.from_dict(res) - - def list_policies( - self, - on_securable_type: str, - on_securable_fullname: str, - *, - include_inherited: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[PolicyInfo]: - """List all policies defined on a securable. Optionally, the list can include inherited policies defined - on the securable's parent schema or catalog. - - :param on_securable_type: str - Required. The type of the securable to list policies for. - :param on_securable_fullname: str - Required. The fully qualified name of securable to list policies for. - :param include_inherited: bool (optional) - Optional. Whether to include policies defined on parent securables. By default, the inherited - policies are not included. - :param max_results: int (optional) - Optional. Maximum number of policies to return on a single page (page length). 
- When not set or set - to 0, the page length is set to a server configured value (recommended); - When set to a value - greater than 0, the page length is the minimum of this value and a server configured value; - :param page_token: str (optional) - Optional. Opaque pagination token to go to next page based on previous query. - - :returns: Iterator over :class:`PolicyInfo` - """ - - query = {} - if include_inherited is not None: - query["include_inherited"] = include_inherited - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do( - "GET", - f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}", - query=query, - headers=headers, - ) - if "policies" in json: - for v in json["policies"]: - yield PolicyInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_policy( - self, - on_securable_type: str, - on_securable_fullname: str, - name: str, - policy_info: PolicyInfo, - *, - update_mask: Optional[str] = None, - ) -> PolicyInfo: - """Update an ABAC policy on a securable. - - :param on_securable_type: str - Required. The type of the securable to update the policy for. - :param on_securable_fullname: str - Required. The fully qualified name of the securable to update the policy for. - :param name: str - Required. The name of the policy to update. - :param policy_info: :class:`PolicyInfo` - Optional fields to update. This is the request body for updating a policy. Use `update_mask` field - to specify which fields in the request is to be updated. - If `update_mask` is empty or "*", all - specified fields will be updated. - If `update_mask` is specified, only the fields specified in the - `update_mask` will be updated. If a field is specified in `update_mask` and not set in the request, - the field will be cleared. Users can use the update mask to explicitly unset optional fields such as - `exception_principals` and `when_condition`. - :param update_mask: str (optional) - Optional. The update mask field for specifying user intentions on which fields to update in the - request. - - :returns: :class:`PolicyInfo` - """ - body = policy_info.as_dict() - query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", - query=query, - body=body, - headers=headers, - ) - return PolicyInfo.from_dict(res) - - class QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. Most write operations @@ -14067,80 +13477,7 @@ def update( return RegisteredModelInfo.from_dict(res) -class ResourceQuotasAPI: - """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that - can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per - metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and - limits. For more information on resource quotas see the [Unity Catalog documentation]. 
- - [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas - """ - - def __init__(self, api_client): - self._api = api_client - - def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: - """The GetQuota API returns usage information for a single resource quota, defined as a child-parent - pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered - asynchronously. The updated count might not be returned in the first call. - - :param parent_securable_type: str - Securable type of the quota parent. - :param parent_full_name: str - Full name of the parent resource. Provide the metastore ID if the parent is a metastore. - :param quota_name: str - Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. - - :returns: :class:`GetQuotaResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}", - headers=headers, - ) - return GetQuotaResponse.from_dict(res) - - def list_quotas( - self, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[QuotaInfo]: - """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the - counts returned. This API does not trigger a refresh of quota counts. - - :param max_results: int (optional) - The number of quotas to return. - :param page_token: str (optional) - Opaque token for the next page of results. - - :returns: Iterator over :class:`QuotaInfo` - """ - - query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do( - "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers - ) - if "quotas" in json: - for v in json["quotas"]: - yield QuotaInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - -class RfaAPI: +class RequestForAccessAPI: """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables. @@ -14246,6 +13583,79 @@ def update_access_request_destinations( return AccessRequestDestinations.from_dict(res) +class ResourceQuotasAPI: + """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that + can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per + metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and + limits. For more information on resource quotas see the [Unity Catalog documentation]. + + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + """ + + def __init__(self, api_client): + self._api = api_client + + def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: + """The GetQuota API returns usage information for a single resource quota, defined as a child-parent + pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered + asynchronously. The updated count might not be returned in the first call. 
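The relocated ResourceQuotasAPI keeps the same call shapes; a short usage sketch with placeholder securable names, assuming the usual resource_quotas accessor:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Point lookup of one quota; the count may lag while an async refresh completes.
quota = w.resource_quotas.get_quota(
    parent_securable_type="catalog",  # hypothetical parent type
    parent_full_name="main",  # hypothetical parent name
    quota_name="schema-quota",  # hypothetical quota name
)
print(quota)

# Full listing; the iterator follows next_page_token transparently.
for q in w.resource_quotas.list_quotas(max_results=100):
    print(q)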
+
+        :param parent_securable_type: str
+            Securable type of the quota parent.
+        :param parent_full_name: str
+            Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
+        :param quota_name: str
+            Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
+
+        :returns: :class:`GetQuotaResponse`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}",
+            headers=headers,
+        )
+        return GetQuotaResponse.from_dict(res)
+
+    def list_quotas(
+        self, *, max_results: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[QuotaInfo]:
+        """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
+        counts returned. This API does not trigger a refresh of quota counts.
+
+        :param max_results: int (optional)
+            The number of quotas to return.
+        :param page_token: str (optional)
+            Opaque token for the next page of results.
+
+        :returns: Iterator over :class:`QuotaInfo`
+        """
+
+        query = {}
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers
+            )
+            if "quotas" in json:
+                for v in json["quotas"]:
+                    yield QuotaInfo.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+
 class SchemasAPI:
     """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema
     organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
@@ -14264,7 +13674,7 @@ def create(
         properties: Optional[Dict[str, str]] = None,
         storage_root: Optional[str] = None,
     ) -> SchemaInfo:
-        """Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
+        """Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
         **CREATE_SCHEMA** privilege in the parent catalog.
 
         :param name: str
@@ -14946,79 +14356,6 @@ class TablesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(
-        self,
-        name: str,
-        catalog_name: str,
-        schema_name: str,
-        table_type: TableType,
-        data_source_format: DataSourceFormat,
-        storage_location: str,
-        *,
-        columns: Optional[List[ColumnInfo]] = None,
-        properties: Optional[Dict[str, str]] = None,
-    ) -> TableInfo:
-        """Creates a new table in the specified catalog and schema.
-
-        To create an external delta table, the caller must have the **EXTERNAL_USE_SCHEMA** privilege on the
-        parent schema and the **EXTERNAL_USE_LOCATION** privilege on the external location. These privileges
-        must always be granted explicitly, and cannot be inherited through ownership or **ALL_PRIVILEGES**.
-
-        Standard UC permissions needed to create tables still apply: **USE_CATALOG** on the parent catalog (or
-        ownership of the parent catalog), **CREATE_TABLE** and **USE_SCHEMA** on the parent schema (or
-        ownership of the parent schema), and **CREATE_EXTERNAL_TABLE** on external location.
-
-        The **columns** field needs to be in a Spark compatible format, so we recommend you use Spark to
-        create these tables. The API itself does not validate the correctness of the column spec.
If the spec - is not Spark compatible, the tables may not be readable by Databricks Runtime. - - NOTE: The Create Table API for external clients only supports creating **external delta tables**. The - values shown in the respective enums are all values supported by Databricks, however for this specific - Create Table API, only **table_type** **EXTERNAL** and **data_source_format** **DELTA** are supported. - Additionally, column masks are not supported when creating tables through this API. - - :param name: str - Name of table, relative to parent schema. - :param catalog_name: str - Name of parent catalog. - :param schema_name: str - Name of parent schema relative to its parent catalog. - :param table_type: :class:`TableType` - :param data_source_format: :class:`DataSourceFormat` - :param storage_location: str - Storage root URL for table (for **MANAGED**, **EXTERNAL** tables). - :param columns: List[:class:`ColumnInfo`] (optional) - The array of __ColumnInfo__ definitions of the table's columns. - :param properties: Dict[str,str] (optional) - A map of key-value properties attached to the securable. - - :returns: :class:`TableInfo` - """ - body = {} - if catalog_name is not None: - body["catalog_name"] = catalog_name - if columns is not None: - body["columns"] = [v.as_dict() for v in columns] - if data_source_format is not None: - body["data_source_format"] = data_source_format.value - if name is not None: - body["name"] = name - if properties is not None: - body["properties"] = properties - if schema_name is not None: - body["schema_name"] = schema_name - if storage_location is not None: - body["storage_location"] = storage_location - if table_type is not None: - body["table_type"] = table_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/tables", body=body, headers=headers) - return TableInfo.from_dict(res) - def delete(self, full_name: str): """Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the @@ -15040,10 +14377,10 @@ def delete(self, full_name: str): def exists(self, full_name: str) -> TableExistsResponse: """Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the - owner of the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the + owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, - and either be the table owner or have the **SELECT** privilege on the table. * Have **BROWSE** - privilege on the parent catalog * Have **BROWSE** privilege on the parent schema + and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on + the parent catalog * Have BROWSE privilege on the parent schema. :param full_name: str Full name of the table. @@ -15068,9 +14405,9 @@ def get( ) -> TableInfo: """Gets a table from the metastore for a specific catalog and schema. 
The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of - the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the - **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, - and either be the table owner or have the **SELECT** privilege on the table. + the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be + the table owner or have the SELECT privilege on the table. :param full_name: str Full name of the table. @@ -15268,86 +14605,19 @@ def update(self, full_name: str, *, owner: Optional[str] = None): self._api.do("PATCH", f"/api/2.1/unity-catalog/tables/{full_name}", body=body, headers=headers) -class TemporaryPathCredentialsAPI: - """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud - storage locations registered in Databricks. These credentials are employed to provide secure and - time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud - provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service - (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud - supports temporary credentials through OAuth 2.0. - - Temporary path credentials ensure that data access is limited in scope and duration, reducing the risk of - unauthorized access or misuse. To use the temporary path credentials API, a metastore admin needs to - enable the external_access_enabled flag (off by default) at the metastore level. A user needs to be - granted the EXTERNAL USE LOCATION permission by external location owner. For requests on existing external - tables, user also needs to be granted the EXTERNAL USE SCHEMA permission at the schema level by catalog - admin. - - Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog admin - explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for security reasons. - Similarly, EXTERNAL USE LOCATION is an external location level permission that can only be granted by - external location owner explicitly and is not included in external location ownership or ALL PRIVILEGES on - the external location for security reasons. - - This API only supports temporary path credentials for external locations and external tables, and volumes - will be supported in the future.""" - - def __init__(self, api_client): - self._api = api_client - - def generate_temporary_path_credentials( - self, url: str, operation: PathOperation, *, dry_run: Optional[bool] = None - ) -> GenerateTemporaryPathCredentialResponse: - """Get a short-lived credential for directly accessing cloud storage locations registered in Databricks. - The Generate Temporary Path Credentials API is only supported for external storage paths, specifically - external locations and external tables. Managed tables are not supported by this API. The metastore - must have **external_access_enabled** flag set to true (default false). The caller must have the - **EXTERNAL_USE_LOCATION** privilege on the external location; this privilege can only be granted by - external location owners. 
For requests on existing external tables, the caller must also have the
-    **EXTERNAL_USE_SCHEMA** privilege on the parent schema; this privilege can only be granted by catalog
-    owners.
-
-        :param url: str
-            URL for path-based access.
-        :param operation: :class:`PathOperation`
-            The operation being performed on the path.
-        :param dry_run: bool (optional)
-            Optional. When set to true, the service will not validate that the generated credentials can perform
-            write operations, therefore no new paths will be created and the response will not contain valid
-            credentials. Defaults to false.
-
-        :returns: :class:`GenerateTemporaryPathCredentialResponse`
-        """
-        body = {}
-        if dry_run is not None:
-            body["dry_run"] = dry_run
-        if operation is not None:
-            body["operation"] = operation.value
-        if url is not None:
-            body["url"] = url
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/unity-catalog/temporary-path-credentials", body=body, headers=headers)
-        return GenerateTemporaryPathCredentialResponse.from_dict(res)
-
-
 class TemporaryTableCredentialsAPI:
     """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
-    locations where table data is stored in Databricks. These credentials are employed to provide secure and
-    time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud
-    provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service
-    (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud
-    supports temporary credentials through OAuth 2.0.
-
-    Temporary table credentials ensure that data access is limited in scope and duration, reducing the risk of
-    unauthorized access or misuse. To use the temporary table credentials API, a metastore admin needs to
-    enable the external_access_enabled flag (off by default) at the metastore level, and user needs to be
-    granted the EXTERNAL USE SCHEMA permission at the schema level by catalog admin. Note that EXTERNAL USE
-    SCHEMA is a schema level permission that can only be granted by catalog admin explicitly and is not
-    included in schema ownership or ALL PRIVILEGES on the schema for security reasons."""
+    locations where table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
+    has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
+    Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud supports
+    temporary credentials through OAuth 2.0. Temporary table credentials ensure that data access is limited in
+    scope and duration, reducing the risk of unauthorized access or misuse. To use the temporary table
+    credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
+    the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
+    by catalog admin.
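A hedged sketch of the typical flow for the API whose docstring continues below: resolve a table ID, then exchange it for a short-lived credential. The table name is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import TableOperation

w = WorkspaceClient()

table = w.tables.get("main.sales.orders")  # hypothetical table
creds = w.temporary_table_credentials.generate_temporary_table_credentials(
    table_id=table.table_id,
    operation=TableOperation.READ,
)
# Exactly one cloud-specific credential field is populated; expiration_time is epoch ms.
print(creds.expiration_time, creds.url)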
Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by + catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for + security reason.""" def __init__(self, api_client): self._api = api_client @@ -15356,9 +14626,9 @@ def generate_temporary_table_credentials( self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None ) -> GenerateTemporaryTableCredentialResponse: """Get a short-lived credential for directly accessing the table data on cloud storage. The metastore - must have **external_access_enabled** flag set to true (default false). The caller must have the - **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can only be granted by - catalog owners. + must have external_access_enabled flag set to true (default false). The caller must have + EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog + owners. :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is @@ -15426,11 +14696,6 @@ def create( :param name: str The name of the volume :param volume_type: :class:`VolumeType` - The type of the volume. An external volume is located in the specified external location. A managed - volume is located in the default location which is specified by the parent schema, or the parent - catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) @@ -15489,7 +14754,7 @@ def list( The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the - **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, + **READ VOLUME** privilege on the volume to recieve the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 57ea7e961..06ca9c2fe 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -45,7 +45,7 @@ class CleanRoom: using the separate CreateCleanRoomOutputCatalog API.""" owner: Optional[str] = None - """This is the Databricks username of the owner of the local clean room securable for permission + """This is Databricks username of the owner of the local clean room securable for permission management.""" remote_detailed_info: Optional[CleanRoomRemoteDetail] = None @@ -142,8 +142,7 @@ class CleanRoomAsset: For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name. 
For jar analyses, the name is the jar - analysis name.""" + For notebooks, the name is the notebook file name.""" asset_type: CleanRoomAssetAssetType """The type of the asset.""" @@ -352,13 +351,13 @@ class CleanRoomAssetNotebook: """Server generated etag that represents the notebook version.""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """Top-level status derived from all reviews""" + """top-level status derived from all reviews""" reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing approvals or rejections""" runner_collaborator_aliases: Optional[List[str]] = None - """Aliases of collaborators that can run the notebook.""" + """collaborators that can run the notebook""" def as_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body.""" @@ -547,12 +546,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetVolumeLocalDetails: @dataclass class CleanRoomAutoApprovalRule: author_collaborator_alias: Optional[str] = None - """Collaborator alias of the author covered by the rule. Only one of `author_collaborator_alias` - and `author_scope` can be set.""" author_scope: Optional[CleanRoomAutoApprovalRuleAuthorScope] = None - """Scope of authors covered by the rule. Only one of `author_collaborator_alias` and `author_scope` - can be set.""" clean_room_name: Optional[str] = None """The name of the clean room this auto-approval rule belongs to.""" @@ -567,7 +562,6 @@ class CleanRoomAutoApprovalRule: """The owner of the rule to whom the rule applies.""" runner_collaborator_alias: Optional[str] = None - """Collaborator alias of the runner covered by the rule.""" def as_dict(self) -> dict: """Serializes the CleanRoomAutoApprovalRule into a dictionary suitable for use as a JSON request body.""" @@ -643,7 +637,7 @@ class CleanRoomCollaborator: It is not restricted to these values and could change in the future""" global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore ID of the collaborator. The identifier is of format + """The global Unity Catalog metastore id of the collaborator. 
The identifier is of format cloud:region:metastore-uuid.""" invite_recipient_email: Optional[str] = None @@ -710,19 +704,19 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomCollaborator: @dataclass class CleanRoomNotebookReview: comment: Optional[str] = None - """Review comment""" + """review comment""" created_at_millis: Optional[int] = None - """When the review was submitted, in epoch milliseconds""" + """timestamp of when the review was submitted""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """Review outcome""" + """review outcome""" review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None - """Specified when the review was not explicitly made by a user""" + """specified when the review was not explicitly made by a user""" reviewer_collaborator_alias: Optional[str] = None - """Collaborator alias of the reviewer""" + """collaborator alias of the reviewer""" def as_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body.""" @@ -1106,7 +1100,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: @dataclass class CreateCleanRoomAssetReviewResponse: notebook_review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """Top-level status derived from all reviews""" + """top-level status derived from all reviews""" notebook_reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing notebook approvals or rejections""" @@ -1354,13 +1348,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: @dataclass class NotebookVersionReview: etag: str - """Etag identifying the notebook version""" + """etag that identifies the notebook version""" review_state: CleanRoomNotebookReviewNotebookReviewState - """Review outcome""" + """review outcome""" comment: Optional[str] = None - """Review comment""" + """review comment""" def as_dict(self) -> dict: """Serializes the NotebookVersionReview into a dictionary suitable for use as a JSON request body.""" @@ -1509,18 +1503,17 @@ def create_clean_room_asset_review( clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, - *, - notebook_review: Optional[NotebookVersionReview] = None, + notebook_review: NotebookVersionReview, ) -> CreateCleanRoomAssetReviewResponse: - """Submit an asset review + """submit an asset review :param clean_room_name: str Name of the clean room :param asset_type: :class:`CleanRoomAssetAssetType` - Asset type. Can either be NOTEBOOK_FILE or JAR_ANALYSIS. + can only be NOTEBOOK_FILE for now :param name: str Name of the asset - :param notebook_review: :class:`NotebookVersionReview` (optional) + :param notebook_review: :class:`NotebookVersionReview` :returns: :class:`CreateCleanRoomAssetReviewResponse` """ @@ -1627,8 +1620,7 @@ def update( For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name. For jar analyses, the name is the jar analysis - name. + For notebooks, the name is the notebook file name. :param asset: :class:`CleanRoomAsset` The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to update. 
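The hunks above make `notebook_review` a required argument of `create_clean_room_asset_review` rather than an optional keyword. A minimal usage sketch under stated assumptions: that the service is exposed as `clean_room_assets` on `WorkspaceClient` and that the review-state enum carries an `APPROVED` member, neither of which is shown in this patch.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import (
    CleanRoomAssetAssetType, CleanRoomNotebookReviewNotebookReviewState,
    NotebookVersionReview)

w = WorkspaceClient()

# The review now travels as a required NotebookVersionReview: etag pins the
# exact notebook version under review, review_state records the outcome.
review = NotebookVersionReview(
    etag="<notebook-version-etag>",  # placeholder; taken from the asset's notebook metadata
    review_state=CleanRoomNotebookReviewNotebookReviewState.APPROVED,  # assumed enum member
    comment="Reviewed and approved",
)

resp = w.clean_room_assets.create_clean_room_asset_review(
    clean_room_name="my-clean-room",
    asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,  # per the docstring, only NOTEBOOK_FILE for now
    name="analysis_notebook",
    notebook_review=review,
)
print(resp.notebook_review_state)  # top-level status derived from all reviews
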
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 11a2a2b78..4d1ba3c0a 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -2718,6 +2718,177 @@ def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: return cls(destination=d.get("destination", None)) +@dataclass +class DefaultBaseEnvironment: + base_environment_cache: Optional[List[DefaultBaseEnvironmentCache]] = None + + created_timestamp: Optional[int] = None + + creator_user_id: Optional[int] = None + + environment: Optional[Environment] = None + """Note: we made `environment` non-internal because we need to expose its `client` field. All other + fields should be treated as internal.""" + + filepath: Optional[str] = None + + id: Optional[str] = None + + is_default: Optional[bool] = None + + last_updated_timestamp: Optional[int] = None + + last_updated_user_id: Optional[int] = None + + message: Optional[str] = None + + name: Optional[str] = None + + principal_ids: Optional[List[int]] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache] + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment.as_dict() + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = [v for v in self.principal_ids] + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.base_environment_cache: + body["base_environment_cache"] = self.base_environment_cache + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.environment: + body["environment"] = self.environment + if self.filepath is not None: + body["filepath"] = self.filepath + if self.id is not None: + body["id"] = self.id + if self.is_default is not None: + body["is_default"] = self.is_default + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user_id is not None: + body["last_updated_user_id"] = self.last_updated_user_id + if self.message is not None: + body["message"] = self.message + if self.name is not None: + body["name"] = self.name + if self.principal_ids: + body["principal_ids"] = self.principal_ids + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironment: + 
"""Deserializes the DefaultBaseEnvironment from a dictionary.""" + return cls( + base_environment_cache=_repeated_dict(d, "base_environment_cache", DefaultBaseEnvironmentCache), + created_timestamp=d.get("created_timestamp", None), + creator_user_id=d.get("creator_user_id", None), + environment=_from_dict(d, "environment", Environment), + filepath=d.get("filepath", None), + id=d.get("id", None), + is_default=d.get("is_default", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + last_updated_user_id=d.get("last_updated_user_id", None), + message=d.get("message", None), + name=d.get("name", None), + principal_ids=d.get("principal_ids", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +@dataclass +class DefaultBaseEnvironmentCache: + indefinite_materialized_environment: Optional[MaterializedEnvironment] = None + + materialized_environment: Optional[MaterializedEnvironment] = None + + message: Optional[str] = None + + status: Optional[DefaultBaseEnvironmentCacheStatus] = None + + def as_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment.as_dict() + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultBaseEnvironmentCache into a shallow dictionary of its immediate attributes.""" + body = {} + if self.indefinite_materialized_environment: + body["indefinite_materialized_environment"] = self.indefinite_materialized_environment + if self.materialized_environment: + body["materialized_environment"] = self.materialized_environment + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironmentCache: + """Deserializes the DefaultBaseEnvironmentCache from a dictionary.""" + return cls( + indefinite_materialized_environment=_from_dict( + d, "indefinite_materialized_environment", MaterializedEnvironment + ), + materialized_environment=_from_dict(d, "materialized_environment", MaterializedEnvironment), + message=d.get("message", None), + status=_enum(d, "status", DefaultBaseEnvironmentCacheStatus), + ) + + +class DefaultBaseEnvironmentCacheStatus(Enum): + + CREATED = "CREATED" + EXPIRED = "EXPIRED" + FAILED = "FAILED" + INVALID = "INVALID" + PENDING = "PENDING" + REFRESHING = "REFRESHING" + + @dataclass class DeleteClusterResponse: def as_dict(self) -> dict: @@ -3136,9 +3307,6 @@ class Environment: version and a set of Python packages. The version is a string, consisting of an integer.""" jar_dependencies: Optional[List[str]] = None - """Use `java_dependencies` instead.""" - - java_dependencies: Optional[List[str]] = None """List of jar dependencies, should be string representing volume paths. 
For example: `/Volumes/path/to/test.jar`.""" @@ -3153,8 +3321,6 @@ def as_dict(self) -> dict: body["environment_version"] = self.environment_version if self.jar_dependencies: body["jar_dependencies"] = [v for v in self.jar_dependencies] - if self.java_dependencies: - body["java_dependencies"] = [v for v in self.java_dependencies] return body def as_shallow_dict(self) -> dict: @@ -3168,8 +3334,6 @@ def as_shallow_dict(self) -> dict: body["environment_version"] = self.environment_version if self.jar_dependencies: body["jar_dependencies"] = self.jar_dependencies - if self.java_dependencies: - body["java_dependencies"] = self.java_dependencies return body @classmethod @@ -3180,7 +3344,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Environment: dependencies=d.get("dependencies", None), environment_version=d.get("environment_version", None), jar_dependencies=d.get("jar_dependencies", None), - java_dependencies=d.get("java_dependencies", None), ) @@ -3418,15 +3581,6 @@ class GcpAttributes: boot_disk_size: Optional[int] = None """Boot disk size in GB""" - first_on_demand: Optional[int] = None - """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This - value should be greater than 0, to make sure the cluster driver node is placed on an on-demand - instance. If this value is greater than or equal to the current cluster size, all nodes will be - placed on on-demand instances. If this value is less than the current cluster size, - `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed - on `availability` instances. Note that this value does not affect cluster size and cannot - currently be mutated over the lifetime of a cluster.""" - google_service_account: Optional[str] = None """If provided, the cluster will impersonate the google service account when accessing gcloud services (like GCS). The google service account must have previously been added to the @@ -3458,8 +3612,6 @@ def as_dict(self) -> dict: body["availability"] = self.availability.value if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3477,8 +3629,6 @@ def as_shallow_dict(self) -> dict: body["availability"] = self.availability if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3495,7 +3645,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: return cls( availability=_enum(d, "availability", GcpAvailability), boot_disk_size=d.get("boot_disk_size", None), - first_on_demand=d.get("first_on_demand", None), google_service_account=d.get("google_service_account", None), local_ssd_count=d.get("local_ssd_count", None), use_preemptible_executors=d.get("use_preemptible_executors", None), @@ -3831,6 +3980,10 @@ class GetInstancePool: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. 
This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -3860,6 +4013,11 @@ class GetInstancePool: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -3904,6 +4062,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3918,6 +4078,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3949,6 +4111,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -3963,6 +4127,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -3990,6 +4156,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -3997,6 +4164,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), 
preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -4642,6 +4810,10 @@ class InstancePoolAndStats: disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" + enable_auto_alternate_node_types: Optional[bool] = None + """For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids + are enabled. This field should not be true if node_type_flexibility is set.""" + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -4674,6 +4846,11 @@ class InstancePoolAndStats: min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" + node_type_flexibility: Optional[NodeTypeFlexibility] = None + """For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true.""" + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or @@ -4718,6 +4895,8 @@ def as_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4732,6 +4911,8 @@ def as_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility.as_dict() if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4763,6 +4944,8 @@ def as_shallow_dict(self) -> dict: body["default_tags"] = self.default_tags if self.disk_spec: body["disk_spec"] = self.disk_spec + if self.enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = self.enable_auto_alternate_node_types if self.enable_elastic_disk is not None: body["enable_elastic_disk"] = self.enable_elastic_disk if self.gcp_attributes: @@ -4777,6 +4960,8 @@ def as_shallow_dict(self) -> dict: body["max_capacity"] = self.max_capacity if self.min_idle_instances is not None: body["min_idle_instances"] = self.min_idle_instances + if self.node_type_flexibility: + body["node_type_flexibility"] = self.node_type_flexibility if self.node_type_id is not None: body["node_type_id"] = self.node_type_id if self.preloaded_docker_images: @@ -4804,6 +4989,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: custom_tags=d.get("custom_tags", None), default_tags=d.get("default_tags", None), disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_auto_alternate_node_types=d.get("enable_auto_alternate_node_types", None), enable_elastic_disk=d.get("enable_elastic_disk", None), gcp_attributes=_from_dict(d, 
"gcp_attributes", InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), @@ -4811,6 +4997,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: instance_pool_name=d.get("instance_pool_name", None), max_capacity=d.get("max_capacity", None), min_idle_instances=d.get("min_idle_instances", None), + node_type_flexibility=_from_dict(d, "node_type_flexibility", NodeTypeFlexibility), node_type_id=d.get("node_type_id", None), preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), preloaded_spark_versions=d.get("preloaded_spark_versions", None), @@ -5676,6 +5863,39 @@ class ListClustersSortByField(Enum): DEFAULT = "DEFAULT" +@dataclass +class ListDefaultBaseEnvironmentsResponse: + default_base_environments: Optional[List[DefaultBaseEnvironment]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = [v.as_dict() for v in self.default_base_environments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_base_environments: + body["default_base_environments"] = self.default_base_environments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDefaultBaseEnvironmentsResponse: + """Deserializes the ListDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls( + default_base_environments=_repeated_dict(d, "default_base_environments", DefaultBaseEnvironment), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListGlobalInitScriptsResponse: scripts: Optional[List[GlobalInitScriptDetails]] = None @@ -5944,6 +6164,44 @@ def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus: MapAny = Dict[str, Any] +@dataclass +class MaterializedEnvironment: + """Materialized Environment information enables environment sharing and reuse via Environment + Caching during library installations. Currently this feature is only supported for Python + libraries. + + - If the env cache entry in LMv2 DB doesn't exist or invalid, library installations and + environment materialization will occur. A new Materialized Environment metadata will be sent + from DP upon successful library installations and env materialization, and is persisted into + database by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment + will be sent to DP by LMv2, and DP will restore the cached environment from a store instead of + reinstalling libraries from scratch. 
+ + If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new version""" + + last_updated_timestamp: Optional[int] = None + """The timestamp (in epoch milliseconds) when the materialized env is updated.""" + + def as_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MaterializedEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MaterializedEnvironment: + """Deserializes the MaterializedEnvironment from a dictionary.""" + return cls(last_updated_timestamp=d.get("last_updated_timestamp", None)) + + @dataclass class MavenLibrary: coordinates: str @@ -6238,6 +6496,28 @@ def from_dict(cls, d: Dict[str, Any]) -> NodeType: ) +@dataclass +class NodeTypeFlexibility: + """For Fleet-V2 using classic clusters, this object contains the information about the alternate + node type ids to use when attempting to launch a cluster. It can be used with both the driver + and worker node types.""" + + def as_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NodeTypeFlexibility into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NodeTypeFlexibility: + """Deserializes the NodeTypeFlexibility from a dictionary.""" + return cls() + + @dataclass class PendingInstanceError: """Error message of a failed pending instances""" @@ -6550,6 +6830,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) +@dataclass +class RefreshDefaultBaseEnvironmentsResponse: + def as_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RefreshDefaultBaseEnvironmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RefreshDefaultBaseEnvironmentsResponse: + """Deserializes the RefreshDefaultBaseEnvironmentsResponse from a dictionary.""" + return cls() + + @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -7159,8 +7457,6 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" - NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" - NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -7199,7 +7495,6 @@ class TerminationReasonCode(Enum): UNKNOWN = "UNKNOWN" UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" - USAGE_POLICY_ENTITLEMENT_DENIED = "USAGE_POLICY_ENTITLEMENT_DENIED" 
USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION" USER_REQUEST = "USER_REQUEST" WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" @@ -9840,11 +10135,13 @@ def create( azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str, str]] = None, disk_spec: Optional[DiskSpec] = None, + enable_auto_alternate_node_types: Optional[bool] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None, remote_disk_throughput: Optional[int] = None, @@ -9873,6 +10170,9 @@ def create( - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param enable_elastic_disk: bool (optional) Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature @@ -9892,6 +10192,10 @@ def create( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. :param preloaded_docker_images: List[:class:`DockerImage`] (optional) Custom Docker Image BYOC :param preloaded_spark_versions: List[str] (optional) @@ -9916,6 +10220,8 @@ def create( body["custom_tags"] = custom_tags if disk_spec is not None: body["disk_spec"] = disk_spec.as_dict() + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if enable_elastic_disk is not None: body["enable_elastic_disk"] = enable_elastic_disk if gcp_attributes is not None: @@ -9928,6 +10234,8 @@ def create( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if preloaded_docker_images is not None: @@ -9971,9 +10279,11 @@ def edit( node_type_id: str, *, custom_tags: Optional[Dict[str, str]] = None, + enable_auto_alternate_node_types: Optional[bool] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, + node_type_flexibility: Optional[NodeTypeFlexibility] = None, remote_disk_throughput: Optional[int] = None, total_initial_remote_disk_size: Optional[int] = None, ): @@ -9994,6 +10304,9 @@ def edit( EBS volumes) with these tags in addition to `default_tags`. 
Notes: - Currently, Databricks allows at most 45 custom tags + :param enable_auto_alternate_node_types: bool (optional) + For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids are + enabled. This field should not be true if node_type_flexibility is set. :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances @@ -10006,6 +10319,10 @@ def edit( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool + :param node_type_flexibility: :class:`NodeTypeFlexibility` (optional) + For pools with node type flexibility (Fleet-V2), this object contains the information about the + alternate node type ids to use when attempting to launch a cluster if the node type id is not + available. This field should not be set if enable_auto_alternate_node_types is true. :param remote_disk_throughput: int (optional) If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED types. @@ -10018,6 +10335,8 @@ def edit( body = {} if custom_tags is not None: body["custom_tags"] = custom_tags + if enable_auto_alternate_node_types is not None: + body["enable_auto_alternate_node_types"] = enable_auto_alternate_node_types if idle_instance_autotermination_minutes is not None: body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes if instance_pool_id is not None: @@ -10028,6 +10347,8 @@ def edit( body["max_capacity"] = max_capacity if min_idle_instances is not None: body["min_idle_instances"] = min_idle_instances + if node_type_flexibility is not None: + body["node_type_flexibility"] = node_type_flexibility.as_dict() if node_type_id is not None: body["node_type_id"] = node_type_id if remote_disk_throughput is not None: @@ -10372,6 +10693,48 @@ def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] + def create_default_base_environment( + self, default_base_environment: DefaultBaseEnvironment, *, request_id: Optional[str] = None + ) -> DefaultBaseEnvironment: + """Create a default base environment within workspaces to define the environment version and a list of + dependencies to be used in serverless notebooks and jobs. This process will asynchronously generate a + cache to optimize dependency resolution. + + :param default_base_environment: :class:`DefaultBaseEnvironment` + :param request_id: str (optional) + A unique identifier for this request. A random UUID is recommended. This request is only idempotent + if a `request_id` is provided. + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if default_base_environment is not None: + body["default_base_environment"] = default_base_environment.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/default-base-environments", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + + def delete_default_base_environment(self, id: str): + """Delete the default base environment given an ID. The default base environment may be used by + downstream workloads. 
Please ensure that the deletion is intentional.
+
+        :param id: str
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/default-base-environments/{id}", headers=headers)
+
     def install(self, cluster_id: str, libraries: List[Library]):
         """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
         after the completion of this request.
@@ -10395,6 +10758,53 @@ def install(self, cluster_id: str, libraries: List[Library]):
 
         self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers)
 
+    def list_default_base_environments(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[DefaultBaseEnvironment]:
+        """List default base environments defined in the workspaces for the requested user.
+
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`DefaultBaseEnvironment`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do("GET", "/api/2.0/default-base-environments", query=query, headers=headers)
+            if "default_base_environments" in json:
+                for v in json["default_base_environments"]:
+                    yield DefaultBaseEnvironment.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def refresh_default_base_environments(self, ids: List[str]):
+        """Refresh the cached default base environments for the given IDs. This process will asynchronously
+        regenerate the caches. The existing caches remain available until they expire.
+
+        :param ids: List[str]
+
+
+        """
+        body = {}
+        if ids is not None:
+            body["ids"] = [v for v in ids]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/default-base-environments/refresh", body=body, headers=headers)
+
     def uninstall(self, cluster_id: str, libraries: List[Library]):
         """Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
         restarted. A request to uninstall a library that is not currently installed is ignored.
@@ -10418,6 +10828,47 @@ def uninstall(self, cluster_id: str, libraries: List[Library]):
 
         self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers)
 
+    def update_default_base_environment(
+        self, id: str, *, default_base_environment: Optional[DefaultBaseEnvironment] = None
+    ) -> DefaultBaseEnvironment:
+        """Update the default base environment for the given ID. This process will asynchronously regenerate the
+        cache. The existing cache remains available until it expires.
+
+        :param id: str
+        :param default_base_environment: :class:`DefaultBaseEnvironment` (optional)
+
+        :returns: :class:`DefaultBaseEnvironment`
+        """
+        body = {}
+        if default_base_environment is not None:
+            body["default_base_environment"] = default_base_environment.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/default-base-environments/{id}", body=body, headers=headers)
+        return DefaultBaseEnvironment.from_dict(res)
+
+    def update_default_default_base_environment(self, id: str) -> DefaultBaseEnvironment:
+        """Set the default base environment for the workspace. This marks the specified DBE as the workspace
+        default.
+ + :param id: str + + :returns: :class:`DefaultBaseEnvironment` + """ + body = {} + if id is not None: + body["id"] = id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", "/api/2.0/default-base-environments/default", body=body, headers=headers) + return DefaultBaseEnvironment.from_dict(res) + class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 9ea51bfb7..79de97355 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -102,6 +102,72 @@ def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: return cls(permission_set=d.get("permission_set", None)) +@dataclass +class CancelQueryExecutionResponse: + status: Optional[List[CancelQueryExecutionResponseStatus]] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = [v.as_dict() for v in self.status] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponse: + """Deserializes the CancelQueryExecutionResponse from a dictionary.""" + return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus)) + + +@dataclass +class CancelQueryExecutionResponseStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + pending: Optional[Empty] = None + + success: Optional[Empty] = None + + def as_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending.as_dict() + if self.success: + body["success"] = self.success.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + if self.pending: + body["pending"] = self.pending + if self.success: + body["success"] = self.success + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponseStatus: + """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" + return cls( + data_token=d.get("data_token", None), + pending=_from_dict(d, "pending", Empty), + success=_from_dict(d, "success", Empty), + ) + + @dataclass class CronSchedule: quartz_cron_expression: str @@ -253,6 +319,45 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Empty into a 
shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class ExecuteQueryResponse: + def as_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExecuteQueryResponse: + """Deserializes the ExecuteQueryResponse from a dictionary.""" + return cls() + + @dataclass class GenieAttachment: """Genie AI Response""" @@ -263,9 +368,6 @@ class GenieAttachment: query: Optional[GenieQueryAttachment] = None """Query Attachment if Genie responds with a SQL query""" - suggested_questions: Optional[GenieSuggestedQuestionsAttachment] = None - """Follow-up questions suggested by Genie""" - text: Optional[TextAttachment] = None """Text Attachment if Genie responds with text""" @@ -276,8 +378,6 @@ def as_dict(self) -> dict: body["attachment_id"] = self.attachment_id if self.query: body["query"] = self.query.as_dict() - if self.suggested_questions: - body["suggested_questions"] = self.suggested_questions.as_dict() if self.text: body["text"] = self.text.as_dict() return body @@ -289,8 +389,6 @@ def as_shallow_dict(self) -> dict: body["attachment_id"] = self.attachment_id if self.query: body["query"] = self.query - if self.suggested_questions: - body["suggested_questions"] = self.suggested_questions if self.text: body["text"] = self.text return body @@ -301,7 +399,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieAttachment: return cls( attachment_id=d.get("attachment_id", None), query=_from_dict(d, "query", GenieQueryAttachment), - suggested_questions=_from_dict(d, "suggested_questions", GenieSuggestedQuestionsAttachment), text=_from_dict(d, "text", TextAttachment), ) @@ -421,39 +518,63 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: ) -@dataclass -class GenieFeedback: - """Feedback containing rating and optional comment""" +class GenieFeedbackRating(Enum): + """Feedback rating for Genie messages""" + + NEGATIVE = "NEGATIVE" + NONE = "NONE" + POSITIVE = "POSITIVE" - rating: Optional[GenieFeedbackRating] = None - """The feedback rating""" + +@dataclass +class GenieGenerateDownloadFullQueryResultResponse: + download_id: Optional[str] = None + """Download ID. 
Use this ID to track the download request in subsequent polling calls""" def as_dict(self) -> dict: - """Serializes the GenieFeedback into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.rating is not None: - body["rating"] = self.rating.value + if self.download_id is not None: + body["download_id"] = self.download_id return body def as_shallow_dict(self) -> dict: - """Serializes the GenieFeedback into a shallow dictionary of its immediate attributes.""" + """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.rating is not None: - body["rating"] = self.rating + if self.download_id is not None: + body["download_id"] = self.download_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieFeedback: - """Deserializes the GenieFeedback from a dictionary.""" - return cls(rating=_enum(d, "rating", GenieFeedbackRating)) + def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: + """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" + return cls(download_id=d.get("download_id", None)) -class GenieFeedbackRating(Enum): - """Feedback rating for Genie messages""" +@dataclass +class GenieGetDownloadFullQueryResultResponse: + statement_response: Optional[sql.StatementResponse] = None + """SQL Statement Execution response. See [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) for more details.""" - NEGATIVE = "NEGATIVE" - NONE = "NONE" - POSITIVE = "POSITIVE" + def as_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.statement_response: + body["statement_response"] = self.statement_response + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: + """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" + return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) @dataclass @@ -607,9 +728,6 @@ class GenieMessage: error: Optional[MessageError] = None """Error message if Genie failed to respond to the message""" - feedback: Optional[GenieFeedback] = None - """User feedback for the message if provided""" - last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" @@ -635,8 +753,6 @@ def as_dict(self) -> dict: body["created_timestamp"] = self.created_timestamp if self.error: body["error"] = self.error.as_dict() - if self.feedback: - body["feedback"] = self.feedback.as_dict() if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -666,8 +782,6 @@ def as_shallow_dict(self) -> dict: body["created_timestamp"] = self.created_timestamp if self.error: body["error"] = self.error - if self.feedback: - body["feedback"] = self.feedback if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -693,7 +807,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
GenieMessage: conversation_id=d.get("conversation_id", None), created_timestamp=d.get("created_timestamp", None), error=_from_dict(d, "error", MessageError), - feedback=_from_dict(d, "feedback", GenieFeedback), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), message_id=d.get("message_id", None), @@ -714,8 +827,6 @@ class GenieQueryAttachment: last_updated_timestamp: Optional[int] = None """Time when the user updated the query last""" - parameters: Optional[List[QueryAttachmentParameter]] = None - query: Optional[str] = None """AI generated SQL query""" @@ -738,8 +849,6 @@ def as_dict(self) -> dict: body["id"] = self.id if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] if self.query is not None: body["query"] = self.query if self.query_result_metadata: @@ -759,8 +868,6 @@ def as_shallow_dict(self) -> dict: body["id"] = self.id if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp - if self.parameters: - body["parameters"] = self.parameters if self.query is not None: body["query"] = self.query if self.query_result_metadata: @@ -778,7 +885,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieQueryAttachment: description=d.get("description", None), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), - parameters=_repeated_dict(d, "parameters", QueryAttachmentParameter), query=d.get("query", None), query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata), statement_id=d.get("statement_id", None), @@ -829,9 +935,6 @@ class GenieSpace: description: Optional[str] = None """Description of the Genie Space""" - warehouse_id: Optional[str] = None - """Warehouse associated with the Genie Space""" - def as_dict(self) -> dict: """Serializes the GenieSpace into a dictionary suitable for use as a JSON request body.""" body = {} @@ -841,8 +944,6 @@ def as_dict(self) -> dict: body["space_id"] = self.space_id if self.title is not None: body["title"] = self.title - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: @@ -854,19 +955,12 @@ def as_shallow_dict(self) -> dict: body["space_id"] = self.space_id if self.title is not None: body["title"] = self.title - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: """Deserializes the GenieSpace from a dictionary.""" - return cls( - description=d.get("description", None), - space_id=d.get("space_id", None), - title=d.get("title", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(description=d.get("description", None), space_id=d.get("space_id", None), title=d.get("title", None)) @dataclass @@ -919,30 +1013,21 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationResponse: @dataclass -class GenieSuggestedQuestionsAttachment: - """Follow-up questions suggested by Genie""" - - questions: Optional[List[str]] = None - """The suggested follow-up questions""" - +class GetPublishedDashboardEmbeddedResponse: def as_dict(self) -> dict: - """Serializes the GenieSuggestedQuestionsAttachment into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.questions: 
- body["questions"] = [v for v in self.questions] return body def as_shallow_dict(self) -> dict: - """Serializes the GenieSuggestedQuestionsAttachment into a shallow dictionary of its immediate attributes.""" + """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.questions: - body["questions"] = self.questions return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieSuggestedQuestionsAttachment: - """Deserializes the GenieSuggestedQuestionsAttachment from a dictionary.""" - return cls(questions=d.get("questions", None)) + def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardEmbeddedResponse: + """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" + return cls() @dataclass @@ -1142,7 +1227,6 @@ class MessageErrorType(Enum): DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR" DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT" DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE" - EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION = "EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION" FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" @@ -1153,8 +1237,6 @@ class MessageErrorType(Enum): GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" - INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION = "INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION" - INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION" INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" @@ -1217,6 +1299,80 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" +@dataclass +class PendingStatus: + data_token: str + """The token to poll for result asynchronously Example: + EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" + + def as_dict(self) -> dict: + """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PendingStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_token is not None: + body["data_token"] = self.data_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PendingStatus: + """Deserializes the PendingStatus from a dictionary.""" + return cls(data_token=d.get("data_token", None)) + + +@dataclass +class PollQueryStatusResponse: + data: Optional[List[PollQueryStatusResponseData]] = None + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data: + body["data"] = [v.as_dict() for v in self.data] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data: + 
body["data"] = self.data + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponse: + """Deserializes the PollQueryStatusResponse from a dictionary.""" + return cls(data=_repeated_dict(d, "data", PollQueryStatusResponseData)) + + +@dataclass +class PollQueryStatusResponseData: + status: QueryResponseStatus + + def as_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status: + body["status"] = self.status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponseData: + """Deserializes the PollQueryStatusResponseData from a dictionary.""" + return cls(status=_from_dict(d, "status", QueryResponseStatus)) + + @dataclass class PublishedDashboard: display_name: Optional[str] = None @@ -1269,39 +1425,60 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard: @dataclass -class QueryAttachmentParameter: - keyword: Optional[str] = None +class QueryResponseStatus: + canceled: Optional[Empty] = None + + closed: Optional[Empty] = None - sql_type: Optional[str] = None + pending: Optional[PendingStatus] = None + + statement_id: Optional[str] = None + """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be + identical to data_token in SuccessStatus and PendingStatus. This field is created for audit + logging purpose to record the statement_id of all QueryResponseStatus.""" - value: Optional[str] = None + success: Optional[SuccessStatus] = None def as_dict(self) -> dict: - """Serializes the QueryAttachmentParameter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.keyword is not None: - body["keyword"] = self.keyword - if self.sql_type is not None: - body["sql_type"] = self.sql_type - if self.value is not None: - body["value"] = self.value + """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled.as_dict() + if self.closed: + body["closed"] = self.closed.as_dict() + if self.pending: + body["pending"] = self.pending.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the QueryAttachmentParameter into a shallow dictionary of its immediate attributes.""" - body = {} - if self.keyword is not None: - body["keyword"] = self.keyword - if self.sql_type is not None: - body["sql_type"] = self.sql_type - if self.value is not None: - body["value"] = self.value + """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.canceled: + body["canceled"] = self.canceled + if self.closed: + body["closed"] = self.closed + if self.pending: + body["pending"] = self.pending + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.success: + body["success"] = self.success return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter: - """Deserializes the QueryAttachmentParameter from a dictionary.""" - return cls(keyword=d.get("keyword", None), 
sql_type=d.get("sql_type", None), value=d.get("value", None))
+    def from_dict(cls, d: Dict[str, Any]) -> QueryResponseStatus:
+        """Deserializes the QueryResponseStatus from a dictionary."""
+        return cls(
+            canceled=_from_dict(d, "canceled", Empty),
+            closed=_from_dict(d, "closed", Empty),
+            pending=_from_dict(d, "pending", PendingStatus),
+            statement_id=d.get("statement_id", None),
+            success=_from_dict(d, "success", SuccessStatus),
+        )


@dataclass
@@ -1627,6 +1804,39 @@ def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser:
    return cls(user_id=d.get("user_id", None))


+@dataclass
+class SuccessStatus:
+    data_token: str
+    """The token to poll for the result asynchronously. Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    truncated: Optional[bool] = None
+    """Whether the query result is truncated (either by byte limit or row limit)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> SuccessStatus:
+        """Deserializes the SuccessStatus from a dictionary."""
+        return cls(data_token=d.get("data_token", None), truncated=d.get("truncated", None))
+
+
@dataclass
class TextAttachment:
    content: Optional[str] = None
@@ -1798,34 +2008,39 @@ def delete_conversation(self, space_id: str, conversation_id: str):

        self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}", headers=headers)

-    def delete_conversation_message(self, space_id: str, conversation_id: str, message_id: str):
-        """Delete a conversation message.
+    def execute_message_attachment_query(
+        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """Execute the SQL for a message query attachment.
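# Illustrative sketch (not part of the generated diff): how the new status
# dataclasses above nest once a poll response is deserialized. The payload
# shape follows the from_dict implementations; all token values are placeholders.
payload = {
    "data": [
        {"status": {"pending": {"data_token": "EC0A..."}, "statement_id": "01eef5da-..."}},
        {"status": {"success": {"data_token": "EC0A...", "truncated": False}}},
    ]
}
resp = PollQueryStatusResponse.from_dict(payload)
for item in resp.data:
    if item.status.success is not None:
        print("finished, truncated:", item.status.success.truncated)
    elif item.status.pending is not None:
        print("still running, token:", item.status.pending.data_token)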
Use this API when the query attachment has expired and - needs to be re-executed. + """Execute the SQL query in the message. :param space_id: str Genie space ID @@ -1833,8 +2048,6 @@ def execute_message_attachment_query( Conversation ID :param message_id: str Message ID - :param attachment_id: str - Attachment ID :returns: :class:`GenieGetMessageQueryResultResponse` """ @@ -1845,16 +2058,19 @@ def execute_message_attachment_query( res = self._api.do( "POST", - f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/execute-query", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query", headers=headers, ) return GenieGetMessageQueryResultResponse.from_dict(res) - def execute_message_query( - self, space_id: str, conversation_id: str, message_id: str - ) -> GenieGetMessageQueryResultResponse: - """DEPRECATED: Use [Execute Message Attachment Query](:method:genie/executemessageattachmentquery) - instead. + def generate_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGenerateDownloadFullQueryResultResponse: + """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of + the download. The query result is stored in an external link and can be retrieved using the [Get + Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + See [Execute Statement](:method:statementexecution/executestatement) for more details. :param space_id: str Genie space ID @@ -1862,8 +2078,10 @@ def execute_message_query( Conversation ID :param message_id: str Message ID + :param attachment_id: str + Attachment ID - :returns: :class:`GenieGetMessageQueryResultResponse` + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` """ headers = { @@ -1872,10 +2090,47 @@ def execute_message_query( res = self._api.do( "POST", - f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads", headers=headers, ) - return GenieGetMessageQueryResultResponse.from_dict(res) + return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) + + def get_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str + ) -> GenieGetDownloadFullQueryResultResponse: + """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and + successfully receiving a `download_id`, use this API to poll the download progress. When the download + is complete, the API returns one or more external links to the query result files. Warning: Databricks + strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. + You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` + disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute + Statement](:method:statementexecution/executestatement) for more details. 
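# Illustrative sketch (not part of the generated diff): the two-step download
# flow added above. All IDs are placeholders, and the `download_id` attribute
# on the generate response is assumed from the docstring.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
gen = w.genie.generate_download_full_query_result(
    space_id="...", conversation_id="...", message_id="...", attachment_id="..."
)
result = w.genie.get_download_full_query_result(
    space_id="...",
    conversation_id="...",
    message_id="...",
    attachment_id="...",
    download_id=gen.download_id,
)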
+
+        :param space_id: str
+            Genie space ID
+        :param conversation_id: str
+            Conversation ID
+        :param message_id: str
+            Message ID
+        :param attachment_id: str
+            Attachment ID
+        :param download_id: str
+            Download ID. This ID is provided by the [Generate Download
+            endpoint](:method:genie/generateDownloadFullQueryResult)
+
+        :returns: :class:`GenieGetDownloadFullQueryResultResponse`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}",
+            headers=headers,
+        )
+        return GenieGetDownloadFullQueryResultResponse.from_dict(res)

    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
        """Get message from conversation.
@@ -1933,8 +2188,8 @@ def get_message_attachment_query_result(
    def get_message_query_result(
        self, space_id: str, conversation_id: str, message_id: str
    ) -> GenieGetMessageQueryResultResponse:
-        """DEPRECATED: Use [Get Message Attachment Query Result](:method:genie/getmessageattachmentqueryresult)
-        instead.
+        """Get the result of the SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.

        :param space_id: str
            Genie space ID
@@ -1960,8 +2215,8 @@ def get_message_query_result(
    def get_message_query_result_by_attachment(
        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
    ) -> GenieGetMessageQueryResultResponse:
-        """DEPRECATED: Use [Get Message Attachment Query Result](:method:genie/getmessageattachmentqueryresult)
-        instead.
+        """Get the result of the SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY` or `COMPLETED`.

        :param space_id: str
            Genie space ID
@@ -2037,20 +2292,12 @@ def list_conversation_messages(
        return GenieListConversationMessagesResponse.from_dict(res)

    def list_conversations(
-        self,
-        space_id: str,
-        *,
-        include_all: Optional[bool] = None,
-        page_size: Optional[int] = None,
-        page_token: Optional[str] = None,
+        self, space_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
    ) -> GenieListConversationsResponse:
        """Get a list of conversations in a Genie Space.

        :param space_id: str
            The ID of the Genie space to retrieve conversations from.
-        :param include_all: bool (optional)
-            Include all conversations in the space across all users. Requires at least CAN MANAGE permission on
-            the space.
        :param page_size: int (optional)
            Maximum number of conversations to return per page
        :param page_token: str (optional)
@@ -2060,8 +2307,6 @@ def list_conversations(

        query = {}
-        if include_all is not None:
-            query["include_all"] = include_all
        if page_size is not None:
            query["page_size"] = page_size
        if page_token is not None:
@@ -2098,7 +2343,9 @@ def list_spaces(
        res = self._api.do("GET", "/api/2.0/genie/spaces", query=query, headers=headers)
        return GenieListSpacesResponse.from_dict(res)

-    def send_message_feedback(self, space_id: str, conversation_id: str, message_id: str, rating: GenieFeedbackRating):
+    def send_message_feedback(
+        self, space_id: str, conversation_id: str, message_id: str, feedback_rating: GenieFeedbackRating
+    ):
        """Send feedback for a message.

        :param space_id: str
            The ID associated with the Genie space where the message is located.
        :param conversation_id: str
            The ID associated with the conversation.
        :param message_id: str
            The ID associated with the message to provide feedback for.
-        :param rating: :class:`GenieFeedbackRating`
+        :param feedback_rating: :class:`GenieFeedbackRating`
            The rating (POSITIVE, NEGATIVE, or NONE).


        """
        body = {}
-        if rating is not None:
-            body["rating"] = rating.value
+        if feedback_rating is not None:
+            body["feedback_rating"] = feedback_rating.value
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }
@@ -2649,6 +2896,21 @@ class LakeviewEmbeddedAPI:
    def __init__(self, api_client):
        self._api = api_client

+    def get_published_dashboard_embedded(self, dashboard_id: str):
+        """Get the current published dashboard within an embedded context.
+
+        :param dashboard_id: str
+            UUID identifying the published dashboard.
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded", headers=headers)
+
    def get_published_dashboard_token_info(
        self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None
    ) -> GetPublishedDashboardTokenInfoResponse:
@@ -2677,3 +2939,93 @@ def get_published_dashboard_token_info(
            "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo", query=query, headers=headers
        )
        return GetPublishedDashboardTokenInfoResponse.from_dict(res)
+
+
+class QueryExecutionAPI:
+    """Query execution APIs for AI / BI Dashboards"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def cancel_published_query_execution(
+        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
+    ) -> CancelQueryExecutionResponse:
+        """Cancel the results for a query for a published, embedded dashboard.
+
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+            Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+        :returns: :class:`CancelQueryExecutionResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("DELETE", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
+        return CancelQueryExecutionResponse.from_dict(res)
+
+    def execute_published_dashboard_query(
+        self, dashboard_name: str, dashboard_revision_id: str, *, override_warehouse_id: Optional[str] = None
+    ):
+        """Execute a query for a published dashboard.
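# Illustrative sketch (not part of the generated diff): sending feedback with
# the renamed parameter above. The request-body key changes from "rating" to
# "feedback_rating" along with the argument name; IDs are placeholders, and
# the rating values follow the docstring (POSITIVE, NEGATIVE, NONE).
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import GenieFeedbackRating

w = WorkspaceClient()
w.genie.send_message_feedback(
    space_id="...",
    conversation_id="...",
    message_id="...",
    feedback_rating=GenieFeedbackRating.POSITIVE,
)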
+
+        :param dashboard_name: str
+            Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel which contains the
+            list of datasets, warehouse_id, and embedded_credentials.
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+            A dashboard schedule can override the warehouse used as compute for processing the published
+            dashboard queries.
+
+
+        """
+        body = {}
+        if dashboard_name is not None:
+            body["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            body["dashboard_revision_id"] = dashboard_revision_id
+        if override_warehouse_id is not None:
+            body["override_warehouse_id"] = override_warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/lakeview-query/query/published", body=body, headers=headers)
+
+    def poll_published_query_status(
+        self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None
+    ) -> PollQueryStatusResponse:
+        """Poll the results for a query for a published, embedded dashboard.
+
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+            Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+        :returns: :class:`PollQueryStatusResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/lakeview-query/query/published", query=query, headers=headers)
+        return PollQueryStatusResponse.from_dict(res)
diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py
index 73d084ce2..efd4cc9e1 100755
--- a/databricks/sdk/service/database.py
+++ b/databricks/sdk/service/database.py
@@ -18,38 +18,6 @@
 # all definitions in this file are in alphabetical order


-@dataclass
-class CustomTag:
-    key: Optional[str] = None
-    """The key of the custom tag."""
-
-    value: Optional[str] = None
-    """The value of the custom tag."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CustomTag into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.value is not None:
-            body["value"] = self.value
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CustomTag into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.value is not None:
-            body["value"] = self.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CustomTag:
-        """Deserializes the CustomTag from a dictionary."""
-        return cls(key=d.get("key", None), value=d.get("value", None))
-
-
@dataclass
class DatabaseCatalog:
    name: str
@@ -145,6 +113,11 @@ class DatabaseInstance:
    name: str
    """The name of the instance. This is the unique identifier for the instance."""

+    budget_policy_id: Optional[str] = None
+    """The desired budget policy to associate with the instance. This field is only returned on
+    create/update responses, and represents the customer provided budget policy. See
+    effective_budget_policy_id for the policy that is actually applied to the instance."""
+
    capacity: Optional[str] = None
    """The sku of the instance.
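# Illustrative sketch (not part of the generated diff): the execute/poll pair
# on QueryExecutionAPI above. Assumes the workspace client exposes the service
# as `query_execution`; dashboard identifiers are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.query_execution.execute_published_dashboard_query(
    dashboard_name="...", dashboard_revision_id="..."
)
status = w.query_execution.poll_published_query_status(
    dashboard_name="...", dashboard_revision_id="..."
)
for item in status.data or []:
    print(item.status.statement_id)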
Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" @@ -157,46 +130,51 @@ class DatabaseInstance: creator: Optional[str] = None """The email of the creator of the instance.""" - custom_tags: Optional[List[CustomTag]] = None - """Custom tags associated with the instance. This field is only included on create and update - responses.""" - - effective_capacity: Optional[str] = None - """Deprecated. The sku of the instance; this field will always match the value of capacity.""" - - effective_custom_tags: Optional[List[CustomTag]] = None - """The recorded custom tags associated with the instance.""" + effective_budget_policy_id: Optional[str] = None + """The policy that is applied to the instance.""" effective_enable_pg_native_login: Optional[bool] = None - """Whether the instance has PG native password login enabled.""" + """xref AIP-129. `enable_pg_native_login` is owned by the client, while + `effective_enable_pg_native_login` is owned by the server. `enable_pg_native_login` will only be + set in Create/Update response messages if and only if the user provides the field via the + request. `effective_enable_pg_native_login` on the other hand will always bet set in all + response messages (Create/Update/Get/List).""" effective_enable_readable_secondaries: Optional[bool] = None - """Whether secondaries serving read-only traffic are enabled. Defaults to false.""" + """xref AIP-129. `enable_readable_secondaries` is owned by the client, while + `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries` + will only be set in Create/Update response messages if and only if the user provides the field + via the request. `effective_enable_readable_secondaries` on the other hand will always bet set + in all response messages (Create/Update/Get/List).""" effective_node_count: Optional[int] = None - """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults - to 1 primary and 0 secondaries.""" + """xref AIP-129. `node_count` is owned by the client, while `effective_node_count` is owned by the + server. `node_count` will only be set in Create/Update response messages if and only if the user + provides the field via the request. `effective_node_count` on the other hand will always bet set + in all response messages (Create/Update/Get/List).""" effective_retention_window_in_days: Optional[int] = None - """The retention window for the instance. This is the time window in days for which the historical - data is retained.""" + """xref AIP-129. `retention_window_in_days` is owned by the client, while + `effective_retention_window_in_days` is owned by the server. `retention_window_in_days` will + only be set in Create/Update response messages if and only if the user provides the field via + the request. `effective_retention_window_in_days` on the other hand will always bet set in all + response messages (Create/Update/Get/List).""" effective_stopped: Optional[bool] = None - """Whether the instance is stopped.""" - - effective_usage_policy_id: Optional[str] = None - """The policy that is applied to the instance.""" + """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the + server. `stopped` will only be set in Create/Update response messages if and only if the user + provides the field via the request. 
`effective_stopped` on the other hand will always be set in
+    all response messages (Create/Update/Get/List)."""

    enable_pg_native_login: Optional[bool] = None
-    """Whether to enable PG native password login on the instance. Defaults to false."""
+    """Whether the instance has PG native password login enabled. Defaults to true."""

    enable_readable_secondaries: Optional[bool] = None
    """Whether to enable secondaries to serve read-only traffic. Defaults to false."""

    node_count: Optional[int] = None
    """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults
-    to 1 primary and 0 secondaries. This field is input only, see effective_node_count for the
-    output."""
+    to 1 primary and 0 secondaries."""

    parent_instance_ref: Optional[DatabaseInstanceRef] = None
    """The ref of the parent instance. This is only available if the instance is child instance. Input:
@@ -221,17 +199,16 @@ class DatabaseInstance:
    """The current state of the instance."""

    stopped: Optional[bool] = None
-    """Whether to stop the instance. An input only param, see effective_stopped for the output."""
+    """Whether the instance is stopped."""

    uid: Optional[str] = None
    """An immutable UUID identifier for the instance."""

-    usage_policy_id: Optional[str] = None
-    """The desired usage policy to associate with the instance."""
-
    def as_dict(self) -> dict:
        """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
        body = {}
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
        if self.capacity is not None:
            body["capacity"] = self.capacity
        if self.child_instance_refs:
@@ -240,12 +217,8 @@ def as_dict(self) -> dict:
            body["creation_time"] = self.creation_time
        if self.creator is not None:
            body["creator"] = self.creator
-        if self.custom_tags:
-            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
-        if self.effective_capacity is not None:
-            body["effective_capacity"] = self.effective_capacity
-        if self.effective_custom_tags:
-            body["effective_custom_tags"] = [v.as_dict() for v in self.effective_custom_tags]
+        if self.effective_budget_policy_id is not None:
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
        if self.effective_enable_pg_native_login is not None:
            body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
        if self.effective_enable_readable_secondaries is not None:
@@ -256,8 +229,6 @@ def as_dict(self) -> dict:
            body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
        if self.effective_stopped is not None:
            body["effective_stopped"] = self.effective_stopped
-        if self.effective_usage_policy_id is not None:
-            body["effective_usage_policy_id"] = self.effective_usage_policy_id
        if self.enable_pg_native_login is not None:
            body["enable_pg_native_login"] = self.enable_pg_native_login
        if self.enable_readable_secondaries is not None:
@@ -282,13 +253,13 @@ def as_dict(self) -> dict:
            body["stopped"] = self.stopped
        if self.uid is not None:
            body["uid"] = self.uid
-        if self.usage_policy_id is not None:
-            body["usage_policy_id"] = self.usage_policy_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes."""
        body = {}
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
        if self.capacity is not None:
            body["capacity"] = self.capacity
        if self.child_instance_refs:
@@ -297,12 +268,8 @@ def as_shallow_dict(self) -> dict:
            body["creation_time"] =
self.creation_time
        if self.creator is not None:
            body["creator"] = self.creator
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.effective_capacity is not None:
-            body["effective_capacity"] = self.effective_capacity
-        if self.effective_custom_tags:
-            body["effective_custom_tags"] = self.effective_custom_tags
+        if self.effective_budget_policy_id is not None:
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
        if self.effective_enable_pg_native_login is not None:
            body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
        if self.effective_enable_readable_secondaries is not None:
@@ -313,8 +280,6 @@ def as_shallow_dict(self) -> dict:
            body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
        if self.effective_stopped is not None:
            body["effective_stopped"] = self.effective_stopped
-        if self.effective_usage_policy_id is not None:
-            body["effective_usage_policy_id"] = self.effective_usage_policy_id
        if self.enable_pg_native_login is not None:
            body["enable_pg_native_login"] = self.enable_pg_native_login
        if self.enable_readable_secondaries is not None:
@@ -339,27 +304,23 @@ def as_shallow_dict(self) -> dict:
            body["stopped"] = self.stopped
        if self.uid is not None:
            body["uid"] = self.uid
-        if self.usage_policy_id is not None:
-            body["usage_policy_id"] = self.usage_policy_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
        """Deserializes the DatabaseInstance from a dictionary."""
        return cls(
+            budget_policy_id=d.get("budget_policy_id", None),
            capacity=d.get("capacity", None),
            child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef),
            creation_time=d.get("creation_time", None),
            creator=d.get("creator", None),
-            custom_tags=_repeated_dict(d, "custom_tags", CustomTag),
-            effective_capacity=d.get("effective_capacity", None),
-            effective_custom_tags=_repeated_dict(d, "effective_custom_tags", CustomTag),
+            effective_budget_policy_id=d.get("effective_budget_policy_id", None),
            effective_enable_pg_native_login=d.get("effective_enable_pg_native_login", None),
            effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None),
            effective_node_count=d.get("effective_node_count", None),
            effective_retention_window_in_days=d.get("effective_retention_window_in_days", None),
            effective_stopped=d.get("effective_stopped", None),
-            effective_usage_policy_id=d.get("effective_usage_policy_id", None),
            enable_pg_native_login=d.get("enable_pg_native_login", None),
            enable_readable_secondaries=d.get("enable_readable_secondaries", None),
            name=d.get("name", None),
@@ -372,7 +333,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
            state=_enum(d, "state", DatabaseInstanceState),
            stopped=d.get("stopped", None),
            uid=d.get("uid", None),
-            usage_policy_id=d.get("usage_policy_id", None),
        )


@dataclass
@@ -393,9 +353,12 @@ class DatabaseInstanceRef:
    provided as input to create a child instance."""

    effective_lsn: Optional[str] = None
-    """For a parent ref instance, this is the LSN on the parent instance from which the instance was
-    created. For a child ref instance, this is the LSN on the instance from which the child instance
-    was created."""
+    """xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server. `lsn`
+    will only be set in Create/Update response messages if and only if the user provides the field
+    via the request. `effective_lsn` on the other hand will always be set in all response messages
+    (Create/Update/Get/List).
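# Illustrative sketch (not part of the generated diff): the client-owned vs.
# server-owned split described in the docstrings above. `node_count` is the
# requested value; `effective_node_count` is populated on every response.
# Instance names and values are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()
inst = w.database.create_database_instance(
    database_instance=DatabaseInstance(name="my-instance", capacity="CU_1", node_count=2)
).result()
print(inst.effective_node_count)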
For a parent ref instance, this is the LSN on the parent instance from + which the instance was created. For a child ref instance, this is the LSN on the instance from + which the child instance was created.""" lsn: Optional[str] = None """User-specified WAL LSN of the ref database instance. @@ -455,34 +418,25 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef: class DatabaseInstanceRole: """A DatabaseInstanceRole represents a Postgres role in a database instance.""" - name: str - """The name of the role. This is the unique identifier for the role in an instance.""" - attributes: Optional[DatabaseInstanceRoleAttributes] = None - """The desired API-exposed Postgres role attribute to associate with the role. Optional.""" - - effective_attributes: Optional[DatabaseInstanceRoleAttributes] = None - """The attributes that are applied to the role.""" + """API-exposed Postgres role attributes""" identity_type: Optional[DatabaseInstanceRoleIdentityType] = None """The type of the role.""" - instance_name: Optional[str] = None - membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None """An enum value for a standard role that this role is a member of.""" + name: Optional[str] = None + """The name of the role. This is the unique identifier for the role in an instance.""" + def as_dict(self) -> dict: """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body.""" body = {} if self.attributes: body["attributes"] = self.attributes.as_dict() - if self.effective_attributes: - body["effective_attributes"] = self.effective_attributes.as_dict() if self.identity_type is not None: body["identity_type"] = self.identity_type.value - if self.instance_name is not None: - body["instance_name"] = self.instance_name if self.membership_role is not None: body["membership_role"] = self.membership_role.value if self.name is not None: @@ -494,12 +448,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.attributes: body["attributes"] = self.attributes - if self.effective_attributes: - body["effective_attributes"] = self.effective_attributes if self.identity_type is not None: body["identity_type"] = self.identity_type - if self.instance_name is not None: - body["instance_name"] = self.instance_name if self.membership_role is not None: body["membership_role"] = self.membership_role if self.name is not None: @@ -511,9 +461,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole: """Deserializes the DatabaseInstanceRole from a dictionary.""" return cls( attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes), - effective_attributes=_from_dict(d, "effective_attributes", DatabaseInstanceRoleAttributes), identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType), - instance_name=d.get("instance_name", None), membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole), name=d.get("name", None), ) @@ -611,6 +559,9 @@ class DatabaseTable: When creating a table in a standard catalog, this field is required. 
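# Illustrative sketch (not part of the generated diff): per the docstring
# above, database_instance_name is required when creating a table in a
# standard catalog, and logical_database_name can target a specific postgres
# database. All names are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseTable

w = WorkspaceClient()
table = w.database.create_database_table(
    table=DatabaseTable(
        name="my_catalog.my_schema.my_table",
        database_instance_name="my-instance",
        logical_database_name="my_pg_database",
    )
)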
In this scenario, specifying this field will allow targeting an arbitrary postgres database.""" + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} @@ -620,6 +571,8 @@ def as_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body def as_shallow_dict(self) -> dict: @@ -631,6 +584,8 @@ def as_shallow_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url return body @classmethod @@ -640,6 +595,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: database_instance_name=d.get("database_instance_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), + table_serving_url=d.get("table_serving_url", None), ) @@ -822,6 +778,9 @@ class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other fields of pipeline are still inferred by table def internally""" + budget_policy_id: Optional[str] = None + """Budget policy of this pipeline.""" + storage_catalog: Optional[str] = None """This field needs to be specified if the destination catalog is a managed postgres catalog. @@ -837,6 +796,8 @@ class NewPipelineSpec: def as_dict(self) -> dict: """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -846,6 +807,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -855,7 +818,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: """Deserializes the NewPipelineSpec from a dictionary.""" - return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None)) + return cls( + budget_policy_id=d.get("budget_policy_id", None), + storage_catalog=d.get("storage_catalog", None), + storage_schema=d.get("storage_schema", None), + ) class ProvisioningInfoState(Enum): @@ -985,6 +952,9 @@ class SyncedDatabaseTable: spec: Optional[SyncedTableSpec] = None + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline
@@ -1007,6 +977,8 @@ def as_dict(self) -> dict:
            body["name"] = self.name
        if self.spec:
            body["spec"] = self.spec.as_dict()
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
        if self.unity_catalog_provisioning_state is not None:
            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
        return body
@@ -1028,6 +1000,8 @@ def as_shallow_dict(self) -> dict:
            body["name"] = self.name
        if self.spec:
            body["spec"] = self.spec
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
        if self.unity_catalog_provisioning_state is not None:
            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
        return body
@@ -1043,6 +1017,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable:
            logical_database_name=d.get("logical_database_name", None),
            name=d.get("name", None),
            spec=_from_dict(d, "spec", SyncedTableSpec),
+            table_serving_url=d.get("table_serving_url", None),
            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
        )
@@ -1607,32 +1582,22 @@ def create_database_instance_and_wait(
        return self.create_database_instance(database_instance=database_instance).result(timeout=timeout)

    def create_database_instance_role(
-        self,
-        instance_name: str,
-        database_instance_role: DatabaseInstanceRole,
-        *,
-        database_instance_name: Optional[str] = None,
+        self, instance_name: str, database_instance_role: DatabaseInstanceRole
    ) -> DatabaseInstanceRole:
        """Create a role for a Database Instance.

        :param instance_name: str
        :param database_instance_role: :class:`DatabaseInstanceRole`
-        :param database_instance_name: str (optional)

        :returns: :class:`DatabaseInstanceRole`
        """
        body = database_instance_role.as_dict()
-        query = {}
-        if database_instance_name is not None:
-            query["database_instance_name"] = database_instance_name
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }

-        res = self._api.do(
-            "POST", f"/api/2.0/database/instances/{instance_name}/roles", query=query, body=body, headers=headers
-        )
+        res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers)
        return DatabaseInstanceRole.from_dict(res)

    def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
@@ -1691,8 +1656,12 @@ def delete_database_instance(self, name: str, *, force: Optional[bool] = None, p
            By default, an instance cannot be deleted if it has descendant instances created via PITR. If this
            flag is specified as true, all descendant instances will be deleted as well.
        :param purge: bool (optional)
-            Deprecated. Omitting the field or setting it to true will result in the field being hard deleted.
-            Setting a value of false will throw a bad request.
+            Note purge=false is in development. If false, the database instance is soft deleted (implementation
+            pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD
+            operations nor connected to. However, they can be undeleted by calling the undelete API for a limited
+            time (implementation pending). If true, the database instance is hard deleted and cannot be
+            undeleted. For the time being, setting this value to true is required to delete an instance (soft
+            delete is not yet supported).
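# Illustrative sketch (not part of the generated diff): creating a Postgres
# role with the simplified signature above. The database_instance_name query
# parameter is gone; names are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstanceRole

w = WorkspaceClient()
role = w.database.create_database_instance_role(
    instance_name="my-instance",
    database_instance_role=DatabaseInstanceRole(name="analyst"),
)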
""" @@ -1768,6 +1737,28 @@ def delete_synced_database_table(self, name: str): self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers) + def failover_database_instance( + self, name: str, *, failover_target_database_instance_name: Optional[str] = None + ) -> DatabaseInstance: + """Failover the primary node of a Database Instance to a secondary. + + :param name: str + Name of the instance to failover. + :param failover_target_database_instance_name: str (optional) + + :returns: :class:`DatabaseInstance` + """ + body = {} + if failover_target_database_instance_name is not None: + body["failover_target_database_instance_name"] = failover_target_database_instance_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/instances/{name}/failover", body=body, headers=headers) + return DatabaseInstance.from_dict(res) + def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance: """Find a Database Instance by uid. @@ -1900,7 +1891,7 @@ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: def list_database_catalogs( self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DatabaseCatalog]: - """This API is currently unimplemented, but exposed for Terraform support. + """List all Database Catalogs within a Database Instance. :param instance_name: str Name of the instance to get database catalogs for. @@ -1935,9 +1926,7 @@ def list_database_catalogs( def list_database_instance_roles( self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DatabaseInstanceRole]: - """START OF PG ROLE APIs Section These APIs are marked a PUBLIC with stage < PUBLIC_PREVIEW. With more - recent Lakebase V2 plans, we don't plan to ever advance these to PUBLIC_PREVIEW. These APIs will - remain effectively undocumented/UI-only and we'll aim for a new public roles API as part of V2 PuPr. + """START OF PG ROLE APIs Section :param instance_name: str :param page_size: int (optional) @@ -2002,7 +1991,7 @@ def list_database_instances( def list_synced_database_tables( self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[SyncedDatabaseTable]: - """This API is currently unimplemented, but exposed for Terraform support. + """List all Synced Database Tables within a Database Instance. :param instance_name: str Name of the instance to get synced tables for. @@ -2037,7 +2026,7 @@ def list_synced_database_tables( def update_database_catalog( self, name: str, database_catalog: DatabaseCatalog, update_mask: str ) -> DatabaseCatalog: - """This API is currently unimplemented, but exposed for Terraform support. + """Updated a Database Catalog. :param name: str The name of the catalog in UC. @@ -2069,8 +2058,7 @@ def update_database_instance( The name of the instance. This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` :param update_mask: str - The list of fields to update. If unspecified, all fields will be updated when possible. To wipe out - custom_tags, specify custom_tags in the update_mask with an empty custom_tags map. + The list of fields to update. This field is not yet supported, and is ignored by the server. 
:returns: :class:`DatabaseInstance` """ @@ -2089,7 +2077,7 @@ def update_database_instance( def update_synced_database_table( self, name: str, synced_table: SyncedDatabaseTable, update_mask: str ) -> SyncedDatabaseTable: - """This API is currently unimplemented, but exposed for Terraform support. + """Update a Synced Database Table. :param name: str Full three-part (catalog, schema, table) name of the table. diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index a470d7544..09166b04f 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -124,244 +124,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AccessControlResponse: ) -@dataclass -class AccountGroup: - account_id: Optional[str] = None - """Databricks account ID""" - - display_name: Optional[str] = None - """String that represents a human-readable group name""" - - external_id: Optional[str] = None - """external_id should be unique for identifying groups""" - - id: Optional[str] = None - """Databricks group ID""" - - members: Optional[List[ComplexValue]] = None - - meta: Optional[ResourceMeta] = None - """Container for the group identifier. Workspace local versus account.""" - - roles: Optional[List[ComplexValue]] = None - """Indicates if the group has the admin role.""" - - def as_dict(self) -> dict: - """Serializes the AccountGroup into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.members: - body["members"] = [v.as_dict() for v in self.members] - if self.meta: - body["meta"] = self.meta.as_dict() - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountGroup into a shallow dictionary of its immediate attributes.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.members: - body["members"] = self.members - if self.meta: - body["meta"] = self.meta - if self.roles: - body["roles"] = self.roles - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountGroup: - """Deserializes the AccountGroup from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - display_name=d.get("displayName", None), - external_id=d.get("externalId", None), - id=d.get("id", None), - members=_repeated_dict(d, "members", ComplexValue), - meta=_from_dict(d, "meta", ResourceMeta), - roles=_repeated_dict(d, "roles", ComplexValue), - ) - - -@dataclass -class AccountServicePrincipal: - account_id: Optional[str] = None - """Databricks account ID""" - - active: Optional[bool] = None - """If this user is active""" - - application_id: Optional[str] = None - """UUID relating to the service principal""" - - display_name: Optional[str] = None - """String that represents a concatenation of given and family names.""" - - external_id: Optional[str] = None - - id: Optional[str] = None - """Databricks service principal ID.""" - - roles: Optional[List[ComplexValue]] = None - """Indicates if the group has the admin role.""" - - def as_dict(self) -> dict: - 
"""Serializes the AccountServicePrincipal into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.active is not None: - body["active"] = self.active - if self.application_id is not None: - body["applicationId"] = self.application_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountServicePrincipal into a shallow dictionary of its immediate attributes.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.active is not None: - body["active"] = self.active - if self.application_id is not None: - body["applicationId"] = self.application_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.roles: - body["roles"] = self.roles - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountServicePrincipal: - """Deserializes the AccountServicePrincipal from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - active=d.get("active", None), - application_id=d.get("applicationId", None), - display_name=d.get("displayName", None), - external_id=d.get("externalId", None), - id=d.get("id", None), - roles=_repeated_dict(d, "roles", ComplexValue), - ) - - -@dataclass -class AccountUser: - account_id: Optional[str] = None - """Databricks account ID""" - - active: Optional[bool] = None - """If this user is active""" - - display_name: Optional[str] = None - """String that represents a concatenation of given and family names. For example `John Smith`.""" - - emails: Optional[List[ComplexValue]] = None - """All the emails associated with the Databricks user.""" - - external_id: Optional[str] = None - """External ID is not currently supported. 
It is reserved for future use.""" - - id: Optional[str] = None - """Databricks user ID.""" - - name: Optional[Name] = None - - roles: Optional[List[ComplexValue]] = None - """Indicates if the group has the admin role.""" - - user_name: Optional[str] = None - """Email address of the Databricks user.""" - - def as_dict(self) -> dict: - """Serializes the AccountUser into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.active is not None: - body["active"] = self.active - if self.display_name is not None: - body["displayName"] = self.display_name - if self.emails: - body["emails"] = [v.as_dict() for v in self.emails] - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.name: - body["name"] = self.name.as_dict() - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] - if self.user_name is not None: - body["userName"] = self.user_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountUser into a shallow dictionary of its immediate attributes.""" - body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.active is not None: - body["active"] = self.active - if self.display_name is not None: - body["displayName"] = self.display_name - if self.emails: - body["emails"] = self.emails - if self.external_id is not None: - body["externalId"] = self.external_id - if self.id is not None: - body["id"] = self.id - if self.name: - body["name"] = self.name - if self.roles: - body["roles"] = self.roles - if self.user_name is not None: - body["userName"] = self.user_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountUser: - """Deserializes the AccountUser from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - active=d.get("active", None), - display_name=d.get("displayName", None), - emails=_repeated_dict(d, "emails", ComplexValue), - external_id=d.get("externalId", None), - id=d.get("id", None), - name=_from_dict(d, "name", Name), - roles=_repeated_dict(d, "roles", ComplexValue), - user_name=d.get("userName", None), - ) - - @dataclass class Actor: """represents an identity trying to access a resource - user or a service principal group can be a @@ -663,7 +425,6 @@ class Group: [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" external_id: Optional[str] = None - """external_id should be unique for identifying groups""" groups: Optional[List[ComplexValue]] = None @@ -749,13 +510,16 @@ class GroupSchema(Enum): @dataclass -class ListAccountGroupsResponse: +class ListGroupsResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[AccountGroup]] = None + resources: Optional[List[Group]] = None """User objects returned in the response.""" + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the service principal.""" + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -763,12 +527,14 @@ class ListAccountGroupsResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListAccountGroupsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -776,12 +542,14 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountGroupsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -789,24 +557,33 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountGroupsResponse: - """Deserializes the ListAccountGroupsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: + """Deserializes the ListGroupsResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", AccountGroup), + resources=_repeated_dict(d, "Resources", Group), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) +class ListResponseSchema(Enum): + + URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse" + + @dataclass -class ListAccountServicePrincipalsResponse: +class ListServicePrincipalResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[AccountServicePrincipal]] = None + resources: Optional[List[ServicePrincipal]] = None """User objects returned in the response.""" + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the List response.""" + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -814,12 +591,14 @@ class ListAccountServicePrincipalsResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListAccountServicePrincipalsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -827,12 +606,14 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountServicePrincipalsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -840,24 +621,34 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountServicePrincipalsResponse: - """Deserializes the ListAccountServicePrincipalsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse: + """Deserializes the ListServicePrincipalResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", AccountServicePrincipal), + resources=_repeated_dict(d, "Resources", ServicePrincipal), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) +class ListSortOrder(Enum): + + ASCENDING = "ascending" + DESCENDING = "descending" + + @dataclass -class ListAccountUsersResponse: +class ListUsersResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[AccountUser]] = None + resources: Optional[List[User]] = None """User objects returned in the response.""" + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the List response.""" + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -865,12 +656,14 @@ class ListAccountUsersResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListAccountUsersResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -878,12 +671,14 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountUsersResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -891,199 +686,12 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountUsersResponse: - """Deserializes the ListAccountUsersResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: + """Deserializes the ListUsersResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", AccountUser), - start_index=d.get("startIndex", None), - total_results=d.get("totalResults", None), - ) - - -@dataclass -class ListGroupsResponse: - items_per_page: Optional[int] = None - """Total results returned in the response.""" - - resources: Optional[List[Group]] = None - """User objects returned in the response.""" - - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the service principal.""" - - start_index: Optional[int] = None - """Starting index of all the results that matched the request filters. 
First item is number 1.""" - - total_results: Optional[int] = None - """Total results that match the request filters.""" - - def as_dict(self) -> dict: - """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: - """Deserializes the ListGroupsResponse from a dictionary.""" - return cls( - items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", Group), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), - start_index=d.get("startIndex", None), - total_results=d.get("totalResults", None), - ) - - -class ListResponseSchema(Enum): - - URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse" - - -@dataclass -class ListServicePrincipalResponse: - items_per_page: Optional[int] = None - """Total results returned in the response.""" - - resources: Optional[List[ServicePrincipal]] = None - """User objects returned in the response.""" - - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the List response.""" - - start_index: Optional[int] = None - """Starting index of all the results that matched the request filters. 
First item is number 1.""" - - total_results: Optional[int] = None - """Total results that match the request filters.""" - - def as_dict(self) -> dict: - """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse: - """Deserializes the ListServicePrincipalResponse from a dictionary.""" - return cls( - items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", ServicePrincipal), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), - start_index=d.get("startIndex", None), - total_results=d.get("totalResults", None), - ) - - -class ListSortOrder(Enum): - - ASCENDING = "ascending" - DESCENDING = "descending" - - -@dataclass -class ListUsersResponse: - items_per_page: Optional[int] = None - """Total results returned in the response.""" - - resources: Optional[List[User]] = None - """User objects returned in the response.""" - - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the List response.""" - - start_index: Optional[int] = None - """Starting index of all the results that matched the request filters. 
First item is number 1.""" - - total_results: Optional[int] = None - """Total results that match the request filters.""" - - def as_dict(self) -> dict: - """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.items_per_page is not None: - body["itemsPerPage"] = self.items_per_page - if self.resources: - body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas - if self.start_index is not None: - body["startIndex"] = self.start_index - if self.total_results is not None: - body["totalResults"] = self.total_results - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: - """Deserializes the ListUsersResponse from a dictionary.""" - return cls( - items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", User), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), + resources=_repeated_dict(d, "Resources", User), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) @@ -2039,6 +1647,24 @@ class ServicePrincipalSchema(Enum): URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" +@dataclass +class UpdateResponse: + def as_dict(self) -> dict: + """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: + """Deserializes the UpdateResponse from a dictionary.""" + return cls() + + @dataclass class User: active: Optional[bool] = None @@ -2260,8 +1886,7 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service - principal. `resource=accounts//tagPolicies/` | A resource name for the - tag policy. + principal. :returns: :class:`GetAssignableRolesForResourceResponse` """ @@ -2293,8 +1918,6 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: set on the group. `name=accounts//servicePrincipals//ruleSets/default` | A name for a rule set on the service principal. - `name=accounts//tagPolicies//ruleSets/default` | A name for a rule set on - the tag policy. :param etag: str Etag used for versioning. The response is at least as fresh as the eTag provided. 
Etag is used for optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from @@ -2374,8 +1997,7 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service - principal. `resource=accounts//tagPolicies/` | A resource name for the - tag policy. + principal. :returns: :class:`GetAssignableRolesForResourceResponse` """ @@ -2404,8 +2026,6 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: set on the group. `name=accounts//servicePrincipals//ruleSets/default` | A name for a rule set on the service principal. - `name=accounts//tagPolicies//ruleSets/default` | A name for a rule set on - the tag policy. :param etag: str Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from @@ -2441,2020 +2061,20 @@ def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetR Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - :returns: :class:`RuleSetResponse` - """ - body = {} - if name is not None: - body["name"] = name - if rule_set is not None: - body["rule_set"] = rule_set.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", "/api/2.0/preview/accounts/access-control/rule-sets", body=body, headers=headers) - return RuleSetResponse.from_dict(res) - - -class AccountGroupsV2API: - """Groups simplify identity management, making it easier to assign access to Databricks account, data, and - other securable objects. - - It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, - instead of to users individually. All Databricks account identities can be assigned as members of groups, - and members inherit permissions that are assigned to their group.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - display_name: Optional[str] = None, - external_id: Optional[str] = None, - id: Optional[str] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - ) -> AccountGroup: - """Creates a group in the Databricks account with a unique name, using the supplied group details. - - :param display_name: str (optional) - String that represents a human-readable group name - :param external_id: str (optional) - :param id: str (optional) - Databricks group ID - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. 
- - :returns: :class:`AccountGroup` - """ - body = {} - if display_name is not None: - body["displayName"] = display_name - if external_id is not None: - body["externalId"] = external_id - if id is not None: - body["id"] = id - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", body=body, headers=headers - ) - return AccountGroup.from_dict(res) - - def delete(self, id: str): - """Deletes a group from the Databricks account. - - :param id: str - Unique ID for a group in the Databricks account. - - - """ - - headers = {} - - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) - - def get(self, id: str) -> AccountGroup: - """Gets the information for a specific group in the Databricks account. - - :param id: str - Unique ID for a group in the Databricks account. - - :returns: :class:`AccountGroup` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) - return AccountGroup.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[AccountGroup]: - """Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this endpoint - will not return members. Instead, members should be retrieved by iterating through `Get group - details`. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. Default is 10000. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. 
- - :returns: Iterator over :class:`AccountGroup` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", query=query, headers=headers - ) - if "Resources" in json: - for v in json["Resources"]: - yield AccountGroup.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a group. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers - ) - - def update( - self, - id: str, - *, - display_name: Optional[str] = None, - external_id: Optional[str] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - ): - """Updates the details of a group by replacing the entire group entity. - - :param id: str - Databricks group ID - :param display_name: str (optional) - String that represents a human-readable group name - :param external_id: str (optional) - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. - - - """ - body = {} - if display_name is not None: - body["displayName"] = display_name - if external_id is not None: - body["externalId"] = external_id - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers) - - -class AccountServicePrincipalsV2API: - """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. - Databricks recommends creating service principals to run production jobs or modify production data. If all - processes that act on production data run with service principals, interactive users do not need any - write, delete, or modify privileges in production. 
This eliminates the risk of a user overwriting - production data by accident.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - external_id: Optional[str] = None, - id: Optional[str] = None, - roles: Optional[List[ComplexValue]] = None, - ) -> AccountServicePrincipal: - """Creates a new service principal in the Databricks account. - - :param active: bool (optional) - If this user is active - :param application_id: str (optional) - UUID relating to the service principal - :param display_name: str (optional) - String that represents a concatenation of given and family names. - :param external_id: str (optional) - :param id: str (optional) - Databricks service principal ID. - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. - - :returns: :class:`AccountServicePrincipal` - """ - body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if external_id is not None: - body["externalId"] = external_id - if id is not None: - body["id"] = id - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", body=body, headers=headers - ) - return AccountServicePrincipal.from_dict(res) - - def delete(self, id: str): - """Delete a single service principal in the Databricks account. - - :param id: str - Unique ID for a service principal in the Databricks account. - - - """ - - headers = {} - - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers - ) - - def get(self, id: str) -> AccountServicePrincipal: - """Gets the details for a single service principal define in the Databricks account. - - :param id: str - Unique ID for a service principal in the Databricks account. - - :returns: :class:`AccountServicePrincipal` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers - ) - return AccountServicePrincipal.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[AccountServicePrincipal]: - """Gets the set of service principals associated with a Databricks account. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. Default is 10000. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. 
- - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. - - :returns: Iterator over :class:`AccountServicePrincipal` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", - query=query, - headers=headers, - ) - if "Resources" in json: - for v in json["Resources"]: - yield AccountServicePrincipal.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a single service principal in the Databricks account. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", - body=body, - headers=headers, - ) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - external_id: Optional[str] = None, - roles: Optional[List[ComplexValue]] = None, - ): - """Updates the details of a single service principal. - - This action replaces the existing service principal with the same name. - - :param id: str - Databricks service principal ID. - :param active: bool (optional) - If this user is active - :param application_id: str (optional) - UUID relating to the service principal - :param display_name: str (optional) - String that represents a concatenation of given and family names. - :param external_id: str (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. 
- - - """ - body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if external_id is not None: - body["externalId"] = external_id - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", - body=body, - headers=headers, - ) - - -class AccountUsersV2API: - """User identities recognized by Databricks and represented by email addresses. - - Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity - provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your - identity provider to create users and groups in Databricks account and give them the proper level of - access. When a user leaves your organization or no longer needs access to Databricks account, admins can - terminate the user in your identity provider and that user’s account will also be removed from - Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from - accessing sensitive data.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - id: Optional[str] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - user_name: Optional[str] = None, - ) -> AccountUser: - """Creates a new user in the Databricks account. This new user will also be added to the Databricks - account. - - :param active: bool (optional) - If this user is active - :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. - :param emails: List[:class:`ComplexValue`] (optional) - All the emails associated with the Databricks user. - :param external_id: str (optional) - External ID is not currently supported. It is reserved for future use. - :param id: str (optional) - Databricks user ID. - :param name: :class:`Name` (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. - :param user_name: str (optional) - Email address of the Databricks user. - - :returns: :class:`AccountUser` - """ - body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if external_id is not None: - body["externalId"] = external_id - if id is not None: - body["id"] = id - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", body=body, headers=headers - ) - return AccountUser.from_dict(res) - - def delete(self, id: str): - """Deletes a user. Deleting a user from a Databricks account also removes objects associated with the - user. - - :param id: str - Unique ID for a user in the Databricks account. 
- - - """ - - headers = {} - - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", headers=headers) - - def get( - self, - id: str, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[GetSortOrder] = None, - start_index: Optional[int] = None, - ) -> AccountUser: - """Gets information for a specific user in Databricks account. - - :param id: str - Unique ID for a user in the Databricks account. - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. Default is 10000. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. Multi-part paths are supported. For example, `userName`, - `name.givenName`, and `emails`. - :param sort_order: :class:`GetSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. - - :returns: :class:`AccountUser` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", query=query, headers=headers - ) - return AccountUser.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[AccountUser]: - """Gets details for all the users associated with a Databricks account. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. Default is 10000. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. 
Multi-part paths are supported. For example, `userName`, - `name.givenName`, and `emails`. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. - - :returns: Iterator over :class:`AccountUser` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", query=query, headers=headers - ) - if "Resources" in json: - for v in json["Resources"]: - yield AccountUser.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates a user resource by applying the supplied operations on specific user attributes. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers - ) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - user_name: Optional[str] = None, - ): - """Replaces a user's information with the data supplied in request. - - :param id: str - Databricks user ID. - :param active: bool (optional) - If this user is active - :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. - :param emails: List[:class:`ComplexValue`] (optional) - All the emails associated with the Databricks user. - :param external_id: str (optional) - External ID is not currently supported. It is reserved for future use. - :param name: :class:`Name` (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Indicates if the group has the admin role. - :param user_name: str (optional) - Email address of the Databricks user. 
- - - """ - body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if external_id is not None: - body["externalId"] = external_id - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers) - - -class CurrentUserAPI: - """This API allows retrieving information about currently authenticated user or service principal.""" - - def __init__(self, api_client): - self._api = api_client - - def me(self) -> User: - """Get details about the current method caller's identity. - - - :returns: :class:`User` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) - return User.from_dict(res) - - -class GroupsV2API: - """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and - other securable objects. - - It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, - instead of to users individually. All Databricks workspace identities can be assigned as members of - groups, and members inherit permissions that are assigned to their group.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ) -> Group: - """Creates a group in the Databricks workspace with a unique name, using the supplied group details. - - :param display_name: str (optional) - String that represents a human-readable group name - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the group. See [assigning entitlements] for a full list of supported - values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param id: str (optional) - Databricks group ID - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`GroupSchema`] (optional) - The schema of the group. 
- - :returns: :class:`Group` - """ - body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/Groups", body=body, headers=headers) - return Group.from_dict(res) - - def delete(self, id: str): - """Deletes a group from the Databricks workspace. - - :param id: str - Unique ID for a group in the Databricks workspace. - - - """ - - headers = {} - - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) - - def get(self, id: str) -> Group: - """Gets the information for a specific group in the Databricks workspace. - - :param id: str - Unique ID for a group in the Databricks workspace. - - :returns: :class:`Group` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) - return Group.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[Group]: - """Gets all details of the groups associated with the Databricks workspace. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. 
- - :returns: Iterator over :class:`Group` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - yield Group.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a group. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - - def update( - self, - id: str, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ): - """Updates the details of a group by replacing the entire group entity. - - :param id: str - Databricks group ID - :param display_name: str (optional) - String that represents a human-readable group name - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the group. See [assigning entitlements] for a full list of supported - values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`GroupSchema`] (optional) - The schema of the group. 
- - - """ - body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - - -class PermissionMigrationAPI: - """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" - - def __init__(self, api_client): - self._api = api_client - - def migrate_permissions( - self, - workspace_id: int, - from_workspace_group_name: str, - to_account_group_name: str, - *, - size: Optional[int] = None, - ) -> MigratePermissionsResponse: - """Migrate Permissions. - - :param workspace_id: int - WorkspaceId of the associated workspace where the permission migration will occur. - :param from_workspace_group_name: str - The name of the workspace group that permissions will be migrated from. - :param to_account_group_name: str - The name of the account group that permissions will be migrated to. - :param size: int (optional) - The maximum number of permissions that will be migrated. - - :returns: :class:`MigratePermissionsResponse` - """ - body = {} - if from_workspace_group_name is not None: - body["from_workspace_group_name"] = from_workspace_group_name - if size is not None: - body["size"] = size - if to_account_group_name is not None: - body["to_account_group_name"] = to_account_group_name - if workspace_id is not None: - body["workspace_id"] = workspace_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers) - return MigratePermissionsResponse.from_dict(res) - - -class PermissionsAPI: - """Permissions API are used to create read, write, edit, update and manage access for various users on - different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage - or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - attach to clusters. * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users - can use cluster policies. * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage - which users can view, manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job - permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a job. * - **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or manage - MLflow experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which - users can read, edit, or manage MLflow registered models. * **[Instance Pool - permissions](:service:instancepools)** — Manage which users can manage or attach to pools. * **[Repo - permissions](repos)** — Manage which users can read, run, edit, or manage a repo. 
* **[Serving endpoint - permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a serving - endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage - SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or - use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of - the required permissions for specific actions or abilities and other important information, see [Access - Control]. Note that to manage access control on service principals, use **[Account Access Control - Proxy](:service:accountaccesscontrolproxy)**. - - [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" - - def __init__(self, api_client): - self._api = api_client - - def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: - """Gets the permissions of an object. Objects can inherit permissions from their parent objects or root - object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. - - :returns: :class:`ObjectPermissions` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) - return ObjectPermissions.from_dict(res) - - def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: - """Gets the permission levels that a user can have on an object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - - :returns: :class:`GetPermissionLevelsResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers - ) - return GetPermissionLevelsResponse.from_dict(res) - - def set( - self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct - permissions if none are specified. Objects can inherit permissions from their parent objects or root - object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. 
- :param access_control_list: List[:class:`AccessControlRequest`] (optional) - - :returns: :class:`ObjectPermissions` - """ - body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) - return ObjectPermissions.from_dict(res) - - def update( - self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Updates the permissions on an object. Objects can inherit permissions from their parent objects or - root object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) - - :returns: :class:`ObjectPermissions` - """ - body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) - return ObjectPermissions.from_dict(res) - - -class ServicePrincipalsV2API: - """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. - Databricks recommends creating service principals to run production jobs or modify production data. If all - processes that act on production data run with service principals, interactive users do not need any - write, delete, or modify privileges in production. This eliminates the risk of a user overwriting - production data by accident.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ) -> ServicePrincipal: - """Creates a new service principal in the Databricks workspace. - - :param active: bool (optional) - If this user is active - :param application_id: str (optional) - UUID relating to the service principal - :param display_name: str (optional) - String that represents a concatenation of given and family names. - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the service principal. See [assigning entitlements] for a full list of - supported values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param id: str (optional) - Databricks service principal ID. 
- :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`ServicePrincipalSchema`] (optional) - The schema of the List response. - - :returns: :class:`ServicePrincipal` - """ - body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/ServicePrincipals", body=body, headers=headers) - return ServicePrincipal.from_dict(res) - - def delete(self, id: str): - """Delete a single service principal in the Databricks workspace. - - :param id: str - Unique ID for a service principal in the Databricks workspace. - - - """ - - headers = {} - - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) - - def get(self, id: str) -> ServicePrincipal: - """Gets the details for a single service principal define in the Databricks workspace. - - :param id: str - Unique ID for a service principal in the Databricks workspace. - - :returns: :class:`ServicePrincipal` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) - return ServicePrincipal.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[ServicePrincipal]: - """Gets the set of service principals associated with a Databricks workspace. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. 
- - :returns: Iterator over :class:`ServicePrincipal` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/ServicePrincipals", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - yield ServicePrincipal.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a single service principal in the Databricks workspace. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ): - """Updates the details of a single service principal. - - This action replaces the existing service principal with the same name. - - :param id: str - Databricks service principal ID. - :param active: bool (optional) - If this user is active - :param application_id: str (optional) - UUID relating to the service principal - :param display_name: str (optional) - String that represents a concatenation of given and family names. - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the service principal. See [assigning entitlements] for a full list of - supported values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`ServicePrincipalSchema`] (optional) - The schema of the List response. 
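# Editor's example (not part of the generated diff): a minimal sketch of the SCIM
# list surface shown above. After this patch the same methods live on the
# un-suffixed ServicePrincipalsAPI, assumed here to be exposed as
# `w.service_principals`; authentication is assumed to come from the environment.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Only simple SCIM filter expressions (eq, co, sw, ne, combined with and/or) are supported.
for sp in w.service_principals.list(filter='displayName sw "ci-"', sort_by="displayName"):
    print(sp.id, sp.display_name)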
- - - """ - body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) - - -class UsersV2API: - """User identities recognized by Databricks and represented by email addresses. - - Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity - provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your - identity provider to create users and groups in Databricks workspace and give them the proper level of - access. When a user leaves your organization or no longer needs access to Databricks workspace, admins can - terminate the user in your identity provider and that user’s account will also be removed from - Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from - accessing sensitive data.""" - - def __init__(self, api_client): - self._api = api_client - - def create( - self, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ) -> User: - """Creates a new user in the Databricks workspace. This new user will also be added to the Databricks - account. - - :param active: bool (optional) - If this user is active - :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. This - field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use - Account SCIM APIs to update `displayName`. - - [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation - :param emails: List[:class:`ComplexValue`] (optional) - All the emails associated with the Databricks user. - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - External ID is not currently supported. It is reserved for future use. - :param groups: List[:class:`ComplexValue`] (optional) - :param id: str (optional) - Databricks user ID. - :param name: :class:`Name` (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`UserSchema`] (optional) - The schema of the user. 
- :param user_name: str (optional) - Email address of the Databricks user. - - :returns: :class:`User` - """ - body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/Users", body=body, headers=headers) - return User.from_dict(res) - - def delete(self, id: str): - """Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the - user. - - :param id: str - Unique ID for a user in the Databricks workspace. - - - """ - - headers = {} - - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Users/{id}", headers=headers) - - def get( - self, - id: str, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[GetSortOrder] = None, - start_index: Optional[int] = None, - ) -> User: - """Gets information for a specific user in Databricks workspace. - - :param id: str - Unique ID for a user in the Databricks workspace. - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. Multi-part paths are supported. For example, `userName`, - `name.givenName`, and `emails`. - :param sort_order: :class:`GetSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. 
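# Editor's example: creating a workspace user via the SCIM create call above. The
# email address is a placeholder, and the client property name assumes the renamed
# (un-suffixed) UsersAPI is wired up as `w.users`.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
user = w.users.create(user_name="jane.doe@example.com", display_name="Jane Doe")
print(user.id)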
- - :returns: :class:`User` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Users/{id}", query=query, headers=headers) - return User.from_dict(res) - - def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: - """Gets the permission levels that a user can have on an object. - - - :returns: :class:`GetPasswordPermissionLevelsResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords/permissionLevels", headers=headers) - return GetPasswordPermissionLevelsResponse.from_dict(res) - - def get_permissions(self) -> PasswordPermissions: - """Gets the permissions of all passwords. Passwords can inherit permissions from their root object. - - - :returns: :class:`PasswordPermissions` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords", headers=headers) - return PasswordPermissions.from_dict(res) - - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[User]: - """Gets details for all the users associated with a Databricks workspace. - - :param attributes: str (optional) - Comma-separated list of attributes to return in response. - :param count: int (optional) - Desired number of results per page. - :param excluded_attributes: str (optional) - Comma-separated list of attributes to exclude in response. - :param filter: str (optional) - Query by which the results have to be filtered. Supported operators are equals(`eq`), - contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be - formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently - only support simple expressions. - - [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. Multi-part paths are supported. For example, `userName`, - `name.givenName`, and `emails`. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. 
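# Editor's example: the two password-authorization reads above, assuming they remain
# on the renamed UsersAPI as `w.users`.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
levels = w.users.get_permission_levels()  # GET /permissions/authorization/passwords/permissionLevels
perms = w.users.get_permissions()         # GET /permissions/authorization/passwords
for acl in perms.access_control_list or []:
    print(acl.user_name or acl.group_name, [p.permission_level for p in acl.all_permissions or []])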
- - :returns: Iterator over :class:`User` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Users", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - yield User.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates a user resource by applying the supplied operations on specific user attributes. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) - - def set_permissions( - self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None - ) -> PasswordPermissions: - """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct - permissions if none are specified. Objects can inherit permissions from their root object. - - :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - - :returns: :class:`PasswordPermissions` - """ - body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) - return PasswordPermissions.from_dict(res) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ): - """Replaces a user's information with the data supplied in request. - - :param id: str - Databricks user ID. - :param active: bool (optional) - If this user is active - :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. This - field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use - Account SCIM APIs to update `displayName`. 
- - [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation - :param emails: List[:class:`ComplexValue`] (optional) - All the emails associated with the Databricks user. - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - External ID is not currently supported. It is reserved for future use. - :param groups: List[:class:`ComplexValue`] (optional) - :param name: :class:`Name` (optional) - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`UserSchema`] (optional) - The schema of the user. - :param user_name: str (optional) - Email address of the Databricks user. - - - """ - body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) - - def update_permissions( - self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None - ) -> PasswordPermissions: - """Updates the permissions on all passwords. Passwords can inherit permissions from their root object. - - :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - - :returns: :class:`PasswordPermissions` - """ - body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) - return PasswordPermissions.from_dict(res) - - -class WorkspaceAssignmentAPI: - """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your - account.""" - - def __init__(self, api_client): - self._api = api_client - - def delete(self, workspace_id: int, principal_id: int): - """Deletes the workspace permissions assignment in a given account and workspace for the specified - principal. - - :param workspace_id: int - The workspace ID for the account. - :param principal_id: int - The ID of the user, service principal, or group. 
-
-
- """
-
- headers = {
- "Accept": "application/json",
- }
-
- self._api.do(
- "DELETE",
- f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}",
- headers=headers,
- )
-
- def get(self, workspace_id: int) -> WorkspacePermissions:
- """Get an array of workspace permissions for the specified account and workspace.
-
- :param workspace_id: int
- The workspace ID.
-
- :returns: :class:`WorkspacePermissions`
- """
-
- headers = {
- "Accept": "application/json",
- }
-
- res = self._api.do(
- "GET",
- f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions",
- headers=headers,
- )
- return WorkspacePermissions.from_dict(res)
-
- def list(self, workspace_id: int) -> Iterator[PermissionAssignment]:
- """Get the permission assignments for the specified Databricks account and Databricks workspace.
-
- :param workspace_id: int
- The workspace ID for the account.
-
- :returns: Iterator over :class:`PermissionAssignment`
- """
-
- headers = {
- "Accept": "application/json",
- }
-
- json = self._api.do(
- "GET",
- f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments",
- headers=headers,
- )
- parsed = PermissionAssignments.from_dict(json).permission_assignments
- return parsed if parsed is not None else []
-
- def update(
- self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None
- ) -> PermissionAssignment:
- """Creates or updates the workspace permissions assignment in a given account and workspace for the
- specified principal.
-
- :param workspace_id: int
- The workspace ID.
- :param principal_id: int
- The ID of the user, service principal, or group.
- :param permissions: List[:class:`WorkspacePermission`] (optional)
- Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
- (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
- will be ignored. Note that excluding this field, or providing unsupported values, will have the same
- effect as providing an empty list, which will result in the deletion of all permissions for the
- principal.
-
- :returns: :class:`PermissionAssignment`
+ :returns: :class:`RuleSetResponse`
 """
 body = {}
- if permissions is not None:
- body["permissions"] = [v.value for v in permissions]
+ if name is not None:
+ body["name"] = name
+ if rule_set is not None:
+ body["rule_set"] = rule_set.as_dict()
 headers = {
 "Accept": "application/json",
 "Content-Type": "application/json",
 }

- res = self._api.do(
- "PUT",
- f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}",
- body=body,
- headers=headers,
- )
- return PermissionAssignment.from_dict(res)
+ res = self._api.do("PUT", "/api/2.0/preview/accounts/access-control/rule-sets", body=body, headers=headers)
+ return RuleSetResponse.from_dict(res)


 class AccountGroupsAPI:
@@ -5349,6 +2969,27 @@ def update(
 self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers)


+class CurrentUserAPI:
+ """This API allows retrieving information about the currently authenticated user or service principal."""
+
+ def __init__(self, api_client):
+ self._api = api_client
+
+ def me(self) -> User:
+ """Get details about the current method caller's identity.
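# Editor's example: the re-added CurrentUserAPI in use, assuming it is wired up as
# `w.current_user` on WorkspaceClient, as in earlier SDK releases.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
me = w.current_user.me()
print(me.user_name, me.id)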
+ + + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) + return User.from_dict(res) + + class GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. @@ -5485,126 +3126,312 @@ def list( :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - :returns: Iterator over :class:`Group` + :returns: Iterator over :class:`Group` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + # deduplicate items that may have been added during iteration + seen = set() + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a group. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + + def update( + self, + id: str, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ): + """Updates the details of a group by replacing the entire group entity. + + :param id: str + Databricks group ID + :param display_name: str (optional) + String that represents a human-readable group name + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. 
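# Editor's example: a SCIM PATCH against the groups endpoint above, adding one member.
# The group display name and the member id "12345" are placeholders; `value` accepts a
# plain dict payload.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
group = next(w.groups.list(filter='displayName eq "data-engineers"'))
w.groups.patch(
    id=group.id,
    operations=[iam.Patch(op=iam.PatchOp.ADD, value={"members": [{"value": "12345"}]})],
)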
+ :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`GroupSchema`] (optional) + The schema of the group. + + + """ + body = {} + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + + +class PermissionMigrationAPI: + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" + + def __init__(self, api_client): + self._api = api_client + + def migrate_permissions( + self, + workspace_id: int, + from_workspace_group_name: str, + to_account_group_name: str, + *, + size: Optional[int] = None, + ) -> MigratePermissionsResponse: + """Migrate Permissions. + + :param workspace_id: int + WorkspaceId of the associated workspace where the permission migration will occur. + :param from_workspace_group_name: str + The name of the workspace group that permissions will be migrated from. + :param to_account_group_name: str + The name of the account group that permissions will be migrated to. + :param size: int (optional) + The maximum number of permissions that will be migrated. + + :returns: :class:`MigratePermissionsResponse` + """ + body = {} + if from_workspace_group_name is not None: + body["from_workspace_group_name"] = from_workspace_group_name + if size is not None: + body["size"] = size + if to_account_group_name is not None: + body["to_account_group_name"] = to_account_group_name + if workspace_id is not None: + body["workspace_id"] = workspace_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers) + return MigratePermissionsResponse.from_dict(res) + + +class PermissionsAPI: + """Permissions API are used to create read, write, edit, update and manage access for various users on + different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage + or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or + attach to clusters. * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users + can use cluster policies. * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage + which users can view, manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job + permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a job. * + **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or manage + MLflow experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which + users can read, edit, or manage MLflow registered models. * **[Instance Pool + permissions](:service:instancepools)** — Manage which users can manage or attach to pools. 
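# Editor's example: the ucx-oriented migration call defined above; the workspace id
# and group names are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.permission_migration.migrate_permissions(
    workspace_id=1234567890,
    from_workspace_group_name="engineers",
    to_account_group_name="engineers",
    size=100,  # optional cap on permissions migrated per call
)
print(resp.permissions_migrated)  # count of migrated permissions, per the response schema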
* **[Repo + permissions](repos)** — Manage which users can read, run, edit, or manage a repo. * **[Serving endpoint + permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a serving + endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage + SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or + use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, + edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of + the required permissions for specific actions or abilities and other important information, see [Access + Control]. Note that to manage access control on service principals, use **[Account Access Control + Proxy](:service:accountaccesscontrolproxy)**. + + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" + + def __init__(self, api_client): + self._api = api_client + + def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: + """Gets the permissions of an object. Objects can inherit permissions from their parent objects or root + object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. + + :returns: :class:`ObjectPermissions` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) + return ObjectPermissions.from_dict(res) + + def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: + """Gets the permission levels that a user can have on an object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. 
+ :param request_object_id: str + + :returns: :class:`GetPermissionLevelsResponse` """ - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index headers = { "Accept": "application/json", } - # deduplicate items that may have been added during iteration - seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a group. + res = self._api.do( + "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers + ) + return GetPermissionLevelsResponse.from_dict(res) - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + def set( + self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root + object. + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. 
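# Editor's example: reading an object's effective permissions and the levels a caller
# may hold on it, per the two GET endpoints above; the job id "123" is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
perms = w.permissions.get(request_object_type="jobs", request_object_id="123")
levels = w.permissions.get_permission_levels(request_object_type="jobs", request_object_id="123")
print([level.permission_level for level in levels.permission_levels or []])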
+ :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :returns: :class:`ObjectPermissions` """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] headers = { + "Accept": "application/json", "Content-Type": "application/json", } - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + res = self._api.do( + "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) + return ObjectPermissions.from_dict(res) def update( self, - id: str, + request_object_type: str, + request_object_id: str, *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ): - """Updates the details of a group by replacing the entire group entity. - - :param id: str - Databricks group ID - :param display_name: str (optional) - String that represents a human-readable group name - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the group. See [assigning entitlements] for a full list of supported - values. - - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`GroupSchema`] (optional) - The schema of the group. + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: + """Updates the permissions on an object. Objects can inherit permissions from their parent objects or + root object. + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. 
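# Editor's example: PUT (set) replaces the whole ACL and deletes direct grants that
# are omitted, while PATCH (update) merges the entries you pass, so updating a single
# grant is usually the safer call, as sketched here with a placeholder job id.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
w.permissions.update(
    request_object_type="jobs",
    request_object_id="123",
    access_control_list=[
        iam.AccessControlRequest(
            group_name="data-engineers",
            permission_level=iam.PermissionLevel.CAN_MANAGE_RUN,
        )
    ],
)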
+ :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :returns: :class:`ObjectPermissions` """ body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] headers = { + "Accept": "application/json", "Content-Type": "application/json", } - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + res = self._api.do( + "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) + return ObjectPermissions.from_dict(res) class ServicePrincipalsAPI: @@ -6273,3 +4100,109 @@ def update_permissions( res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) return PasswordPermissions.from_dict(res) + + +class WorkspaceAssignmentAPI: + """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your + account.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, workspace_id: int, principal_id: int): + """Deletes the workspace permissions assignment in a given account and workspace for the specified + principal. + + :param workspace_id: int + The workspace ID for the account. + :param principal_id: int + The ID of the user, service principal, or group. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + headers=headers, + ) + + def get(self, workspace_id: int) -> WorkspacePermissions: + """Get an array of workspace permissions for the specified account and workspace. + + :param workspace_id: int + The workspace ID. + + :returns: :class:`WorkspacePermissions` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions", + headers=headers, + ) + return WorkspacePermissions.from_dict(res) + + def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: + """Get the permission assignments for the specified Databricks account and Databricks workspace. + + :param workspace_id: int + The workspace ID for the account. 
+ + :returns: Iterator over :class:`PermissionAssignment` + """ + + headers = { + "Accept": "application/json", + } + + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", + headers=headers, + ) + parsed = PermissionAssignments.from_dict(json).permission_assignments + return parsed if parsed is not None else [] + + def update( + self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None + ) -> PermissionAssignment: + """Creates or updates the workspace permissions assignment in a given account and workspace for the + specified principal. + + :param workspace_id: int + The workspace ID. + :param principal_id: int + The ID of the user, service principal, or group. + :param permissions: List[:class:`WorkspacePermission`] (optional) + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. + + :returns: :class:`PermissionAssignment` + """ + body = {} + if permissions is not None: + body["permissions"] = [v.value for v in permissions] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + body=body, + headers=headers, + ) + return PermissionAssignment.from_dict(res) diff --git a/databricks/sdk/service/iamv2.py b/databricks/sdk/service/iamv2.py index 25cd2ad25..243e2fe67 100755 --- a/databricks/sdk/service/iamv2.py +++ b/databricks/sdk/service/iamv2.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Dict, List, Optional -from ._internal import _enum, _from_dict, _repeated_enum +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -68,87 +68,154 @@ def from_dict(cls, d: Dict[str, Any]) -> Group: ) -class PrincipalType(Enum): - """The type of the principal (user/sp/group).""" +@dataclass +class ListGroupsResponse: + """TODO: Write description later when this method is implemented""" - GROUP = "GROUP" - SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL" - USER = "USER" + groups: Optional[List[Group]] = None + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
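# Editor's example: assigning workspace-level access from the account, per the PUT
# endpoint above; requires an AccountClient with an account id configured, and the
# numeric ids are placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()
assignment = a.workspace_assignment.update(
    workspace_id=1234567890,
    principal_id=987654321,
    permissions=[iam.WorkspacePermission.USER],
)
print(assignment.permissions)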
If this field is omitted, + there are no subsequent pages.""" + + def as_dict(self) -> dict: + """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.groups: + body["groups"] = self.groups + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: + """Deserializes the ListGroupsResponse from a dictionary.""" + return cls(groups=_repeated_dict(d, "groups", Group), next_page_token=d.get("next_page_token", None)) @dataclass -class ResolveGroupResponse: - group: Optional[Group] = None - """The group that was resolved.""" +class ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. If this field is omitted, + there are no subsequent pages.""" + + service_principals: Optional[List[ServicePrincipal]] = None def as_dict(self) -> dict: - """Serializes the ResolveGroupResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListServicePrincipalsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group: - body["group"] = self.group.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = [v.as_dict() for v in self.service_principals] return body def as_shallow_dict(self) -> dict: - """Serializes the ResolveGroupResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListServicePrincipalsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.group: - body["group"] = self.group + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.service_principals: + body["service_principals"] = self.service_principals return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResolveGroupResponse: - """Deserializes the ResolveGroupResponse from a dictionary.""" - return cls(group=_from_dict(d, "group", Group)) + def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalsResponse: + """Deserializes the ListServicePrincipalsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + service_principals=_repeated_dict(d, "service_principals", ServicePrincipal), + ) @dataclass -class ResolveServicePrincipalResponse: - service_principal: Optional[ServicePrincipal] = None - """The service principal that was resolved.""" +class ListUsersResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + users: Optional[List[User]] = None def as_dict(self) -> dict: - """Serializes the ResolveServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.service_principal: - body["service_principal"] = self.service_principal.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = [v.as_dict() for v in self.users] return body def as_shallow_dict(self) -> dict: - """Serializes the ResolveServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.service_principal: - body["service_principal"] = self.service_principal + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.users: + body["users"] = self.users return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResolveServicePrincipalResponse: - """Deserializes the ResolveServicePrincipalResponse from a dictionary.""" - return cls(service_principal=_from_dict(d, "service_principal", ServicePrincipal)) + def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: + """Deserializes the ListUsersResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), users=_repeated_dict(d, "users", User)) @dataclass -class ResolveUserResponse: - user: Optional[User] = None - """The user that was resolved.""" +class ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented""" + + next_page_token: Optional[str] = None + """A token, which can be sent as page_token to retrieve the next page. 
If this field is omitted, + there are no subsequent pages.""" + + workspace_access_details: Optional[List[WorkspaceAccessDetail]] = None def as_dict(self) -> dict: - """Serializes the ResolveUserResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListWorkspaceAccessDetailsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.user: - body["user"] = self.user.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = [v.as_dict() for v in self.workspace_access_details] return body def as_shallow_dict(self) -> dict: - """Serializes the ResolveUserResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListWorkspaceAccessDetailsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.user: - body["user"] = self.user + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.workspace_access_details: + body["workspace_access_details"] = self.workspace_access_details return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResolveUserResponse: - """Deserializes the ResolveUserResponse from a dictionary.""" - return cls(user=_from_dict(d, "user", User)) + def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceAccessDetailsResponse: + """Deserializes the ListWorkspaceAccessDetailsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + workspace_access_details=_repeated_dict(d, "workspace_access_details", WorkspaceAccessDetail), + ) + + +class PrincipalType(Enum): + """The type of the principal (user/sp/group).""" + + GROUP = "GROUP" + SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL" + USER = "USER" @dataclass @@ -159,7 +226,7 @@ class ServicePrincipal: """The parent account ID for the service principal in Databricks.""" account_sp_status: Optional[State] = None - """The activity status of a service principal in a Databricks account.""" + """The activity status of a sp in a Databricks account.""" application_id: Optional[str] = None """Application ID of the service principal.""" @@ -227,6 +294,81 @@ class State(Enum): INACTIVE = "INACTIVE" +@dataclass +class SyncGroupResponse: + group: Optional[Group] = None + """The group that was synced.""" + + def as_dict(self) -> dict: + """Serializes the SyncGroupResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.group: + body["group"] = self.group.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncGroupResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group: + body["group"] = self.group + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncGroupResponse: + """Deserializes the SyncGroupResponse from a dictionary.""" + return cls(group=_from_dict(d, "group", Group)) + + +@dataclass +class SyncServicePrincipalResponse: + service_principal: Optional[ServicePrincipal] = None + """The service principal that was synced.""" + + def as_dict(self) -> dict: + """Serializes the SyncServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.service_principal: + body["service_principal"] = self.service_principal.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncServicePrincipalResponse into a shallow dictionary of its 
immediate attributes.""" + body = {} + if self.service_principal: + body["service_principal"] = self.service_principal + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncServicePrincipalResponse: + """Deserializes the SyncServicePrincipalResponse from a dictionary.""" + return cls(service_principal=_from_dict(d, "service_principal", ServicePrincipal)) + + +@dataclass +class SyncUserResponse: + user: Optional[User] = None + """The user that was synced.""" + + def as_dict(self) -> dict: + """Serializes the SyncUserResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.user: + body["user"] = self.user.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncUserResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.user: + body["user"] = self.user + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncUserResponse: + """Deserializes the SyncUserResponse from a dictionary.""" + return cls(user=_from_dict(d, "user", User)) + + @dataclass class User: """The details of a User resource.""" @@ -427,106 +569,76 @@ class AccountIamV2API: def __init__(self, api_client): self._api = api_client - def get_workspace_access_detail( - self, workspace_id: int, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None - ) -> WorkspaceAccessDetail: - """Returns the access details for a principal in a workspace. Allows for checking access details for any - provisioned principal (user, service principal, or group) in a workspace. * Provisioned principal here - refers to one that has been synced into Databricks from the customer's IdP or added explicitly to - Databricks via SCIM/UI. Allows for passing in a "view" parameter to control what fields are returned - (BASIC by default or FULL). + def create_group(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented - :param workspace_id: int - Required. The workspace ID for which the access details are being requested. - :param principal_id: int - Required. The internal ID of the principal (user/sp/group) for which the access details are being - requested. - :param view: :class:`WorkspaceAccessDetailView` (optional) - Controls what fields are returned. + :param group: :class:`Group` + Required. Group to be created in - :returns: :class:`WorkspaceAccessDetail` + :returns: :class:`Group` """ - - query = {} - if view is not None: - query["view"] = view.value + body = group.as_dict() headers = { "Accept": "application/json", + "Content-Type": "application/json", } res = self._api.do( - "GET", - f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", - query=query, - headers=headers, + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", body=body, headers=headers ) - return WorkspaceAccessDetail.from_dict(res) + return Group.from_dict(res) - def resolve_group(self, external_id: str) -> ResolveGroupResponse: - """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it - will be created in the account. If the customer is not onboarded onto Automatic Identity Management - (AIM), this will return an error. + def create_service_principal(self, service_principal: ServicePrincipal) -> ServicePrincipal: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. 
The external ID of the group in the customer's IdP. + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be created in - :returns: :class:`ResolveGroupResponse` + :returns: :class:`ServicePrincipal` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = service_principal.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } res = self._api.do( - "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/groups/resolveByExternalId", - body=body, - headers=headers, + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", body=body, headers=headers ) - return ResolveGroupResponse.from_dict(res) + return ServicePrincipal.from_dict(res) - def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipalResponse: - """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will - be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def create_user(self, user: User) -> User: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the service principal in the customer's IdP. + :param user: :class:`User` + Required. User to be created in - :returns: :class:`ResolveServicePrincipalResponse` + :returns: :class:`User` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = user.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } res = self._api.do( - "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/resolveByExternalId", - body=body, - headers=headers, + "POST", f"/api/2.0/identity/accounts/{self._api.account_id}/users", body=body, headers=headers ) - return ResolveServicePrincipalResponse.from_dict(res) + return User.from_dict(res) - def resolve_user(self, external_id: str) -> ResolveUserResponse: - """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it - will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def create_workspace_access_detail( + self, parent: str, workspace_access_detail: WorkspaceAccessDetail + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the user in the customer's IdP. + :param parent: str + Required. The parent path for workspace access detail. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be created in . 
- :returns: :class:`ResolveUserResponse` + :returns: :class:`WorkspaceAccessDetail` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + body = workspace_access_detail.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -534,110 +646,986 @@ def resolve_user(self, external_id: str) -> ResolveUserResponse: res = self._api.do( "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/users/resolveByExternalId", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{parent}/workspaceAccessDetails", body=body, headers=headers, ) - return ResolveUserResponse.from_dict(res) + return WorkspaceAccessDetail.from_dict(res) + def delete_group(self, internal_id: int): + """TODO: Write description later when this method is implemented -class WorkspaceIamV2API: - """These APIs are used to manage identities and the workspace access of these identities in .""" + :param internal_id: int + Required. Internal ID of the group in Databricks. - def __init__(self, api_client): - self._api = api_client - def get_workspace_access_detail_local( - self, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None - ) -> WorkspaceAccessDetail: - """Returns the access details for a principal in the current workspace. Allows for checking access - details for any provisioned principal (user, service principal, or group) in the current workspace. * - Provisioned principal here refers to one that has been synced into Databricks from the customer's IdP - or added explicitly to Databricks via SCIM/UI. Allows for passing in a "view" parameter to control - what fields are returned (BASIC by default or FULL). + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers + ) + + def delete_service_principal(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + headers=headers, + ) + def delete_user(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + + def delete_workspace_access_detail(self, workspace_id: int, principal_id: int): + """TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID where the principal has access. :param principal_id: int - Required. The internal ID of the principal (user/sp/group) for which the access details are being - requested. - :param view: :class:`WorkspaceAccessDetailView` (optional) - Controls what fields are returned. + Required. ID of the principal in Databricks to delete workspace access for. 
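A minimal sketch of the account-level CRUD surface added here, assuming an `AccountClient` that exposes this service as `a.iam_v2` and a `Group` model with `display_name` and `internal_id` fields (neither attribute name appears in this hunk):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.iamv2 import Group

    a = AccountClient()  # reads account host and credentials from the environment

    # Create a group, fetch it back by its internal ID, then delete it.
    group = a.iam_v2.create_group(Group(display_name="data-engineers"))
    fetched = a.iam_v2.get_group(internal_id=group.internal_id)  # field name assumed
    a.iam_v2.delete_group(internal_id=fetched.internal_id)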
+ - :returns: :class:`WorkspaceAccessDetail` """ - query = {} - if view is not None: - query["view"] = view.value headers = { "Accept": "application/json", } - res = self._api.do( - "GET", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, headers=headers + self._api.do( + "DELETE", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + headers=headers, ) - return WorkspaceAccessDetail.from_dict(res) - def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: - """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it - will be created in the account. If the customer is not onboarded onto Automatic Identity Management - (AIM), this will return an error. + def get_group(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the group in the customer's IdP. + :param internal_id: int + Required. Internal ID of the group in Databricks. - :returns: :class:`ResolveGroupResponse` + :returns: :class:`Group` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + headers = { "Accept": "application/json", - "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/identity/groups/resolveByExternalId", body=body, headers=headers) - return ResolveGroupResponse.from_dict(res) + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}", headers=headers + ) + return Group.from_dict(res) - def resolve_service_principal_proxy(self, external_id: str) -> ResolveServicePrincipalResponse: - """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will - be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def get_service_principal(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the service principal in the customer's IdP. + :param internal_id: int + Required. Internal ID of the service principal in Databricks. - :returns: :class:`ResolveServicePrincipalResponse` + :returns: :class:`ServicePrincipal` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + headers = { "Accept": "application/json", - "Content-Type": "application/json", } res = self._api.do( - "POST", "/api/2.0/identity/servicePrincipals/resolveByExternalId", body=body, headers=headers + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", headers=headers ) - return ResolveServicePrincipalResponse.from_dict(res) + return ServicePrincipal.from_dict(res) - def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: - """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it - will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. + def get_user(self, internal_id: int) -> User: + """TODO: Write description later when this method is implemented - :param external_id: str - Required. The external ID of the user in the customer's IdP. + :param internal_id: int + Required. Internal ID of the user in Databricks. 
- :returns: :class:`ResolveUserResponse` + :returns: :class:`User` """ - body = {} - if external_id is not None: - body["external_id"] = external_id + headers = { "Accept": "application/json", - "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/identity/users/resolveByExternalId", body=body, headers=headers) - return ResolveUserResponse.from_dict(res) + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", headers=headers + ) + return User.from_dict(res) + + def get_workspace_access_detail( + self, workspace_id: int, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None + ) -> WorkspaceAccessDetail: + """Returns the access details for a principal in a workspace. Allows for checking access details for any + provisioned principal (user, service principal, or group) in a workspace. * Provisioned principal here + refers to one that has been synced into Databricks from the customer's IdP or added explicitly to + Databricks via SCIM/UI. Allows for passing in a "view" parameter to control what fields are returned + (BASIC by default or FULL). + + :param workspace_id: int + Required. The workspace ID for which the access details are being requested. + :param principal_id: int + Required. The internal ID of the principal (user/sp/group) for which the access details are being + requested. + :param view: :class:`WorkspaceAccessDetailView` (optional) + Controls what fields are returned. + + :returns: :class:`WorkspaceAccessDetail` + """ + + query = {} + if view is not None: + query["view"] = view.value + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + query=query, + headers=headers, + ) + return WorkspaceAccessDetail.from_dict(res) + + def list_groups(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> ListGroupsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of groups to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent + page. + + :returns: :class:`ListGroupsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/groups", query=query, headers=headers + ) + return ListGroupsResponse.from_dict(res) + + def list_service_principals( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListServicePrincipalsResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of service principals to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the + subsequent page. 
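For `get_workspace_access_detail`, the optional `view` argument widens the response from the default BASIC projection to FULL; a sketch, assuming the same `a.iam_v2` accessor and that the enum exposes a `FULL` member:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.iamv2 import WorkspaceAccessDetailView

    a = AccountClient()

    # BASIC is the default; FULL returns every field of the access detail.
    detail = a.iam_v2.get_workspace_access_detail(
        workspace_id=1234567890,  # example workspace ID
        principal_id=42,          # example internal principal ID
        view=WorkspaceAccessDetailView.FULL,
    )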
+ + :returns: :class:`ListServicePrincipalsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals", query=query, headers=headers + ) + return ListServicePrincipalsResponse.from_dict(res) + + def list_users(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> ListUsersResponse: + """TODO: Write description later when this method is implemented + + :param page_size: int (optional) + The maximum number of users to return. The service may return fewer than this value. + :param page_token: str (optional) + A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page. + + :returns: :class:`ListUsersResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/identity/accounts/{self._api.account_id}/users", query=query, headers=headers + ) + return ListUsersResponse.from_dict(res) + + def list_workspace_access_details( + self, workspace_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> ListWorkspaceAccessDetailsResponse: + """TODO: Write description later when this method is implemented + + :param workspace_id: int + The workspace ID for which the workspace access details are being fetched. + :param page_size: int (optional) + The maximum number of workspace access details to return. The service may return fewer than this + value. + :param page_token: str (optional) + A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the + subsequent page. + + :returns: :class:`ListWorkspaceAccessDetailsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails", + query=query, + headers=headers, + ) + return ListWorkspaceAccessDetailsResponse.from_dict(res) + + def sync_group(self, external_id: str) -> SyncGroupResponse: + """Syncs a group with the given external ID from the customer's IdP. If the group does not exist, it will + be created in the account. If the customer is not onboarded onto Automatic Identity Management (AIM), + this will return an error. Synced information is cached for 30 minutes, so subsequent calls to this + method will not result in a full sync unless the cache is stale. If this is triggered while the cache + is still valid, it will return the cached group information. + + :param external_id: str + Required. The external ID of the group in the customer's IdP. 
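The list methods follow the usual page-token contract; a sketch of draining `list_groups`, assuming the response carries `groups` and `next_page_token` fields (the `ListGroupsResponse` shape is not shown in this hunk):

    from databricks.sdk import AccountClient

    a = AccountClient()

    all_groups = []
    page_token = None
    while True:
        resp = a.iam_v2.list_groups(page_size=100, page_token=page_token)
        all_groups.extend(resp.groups or [])  # field name assumed
        page_token = resp.next_page_token     # field name assumed
        if not page_token:  # an empty token marks the last page
            break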
+
+        :returns: :class:`SyncGroupResponse`
+        """
+        body = {}
+        if external_id is not None:
+            body["external_id"] = external_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/identity/accounts/{self._api.account_id}/groups/syncByExternalId",
+            body=body,
+            headers=headers,
+        )
+        return SyncGroupResponse.from_dict(res)
+
+    def sync_service_principal(self, external_id: str) -> SyncServicePrincipalResponse:
+        """Syncs a service principal with the given external ID from the customer's IdP. If the service
+        principal does not exist, it will be created. If the customer is not onboarded onto Automatic
+        Identity Management (AIM), this will return an error. Synced information is cached for 30 minutes,
+        so subsequent calls to this method will not result in a full sync unless the cache is stale. If
+        this is triggered while the cache is still valid, it will return the cached service principal
+        information.
+
+        :param external_id: str
+          Required. The external ID of the service principal in the customer's IdP.
+
+        :returns: :class:`SyncServicePrincipalResponse`
+        """
+        body = {}
+        if external_id is not None:
+            body["external_id"] = external_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/syncByExternalId",
+            body=body,
+            headers=headers,
+        )
+        return SyncServicePrincipalResponse.from_dict(res)
+
+    def sync_user(self, external_id: str) -> SyncUserResponse:
+        """Syncs a user with the given external ID from the customer's IdP. If the user does not exist, it will
+        be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will
+        return an error. Synced information is cached for 30 minutes, so subsequent calls to this method will
+        not result in a full sync unless the cache is stale. If this is triggered while the cache is still
+        valid, it will return the cached user information.
+
+        :param external_id: str
+          Required. The external ID of the user in the customer's IdP.
+
+        :returns: :class:`SyncUserResponse`
+        """
+        body = {}
+        if external_id is not None:
+            body["external_id"] = external_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/identity/accounts/{self._api.account_id}/users/syncByExternalId",
+            body=body,
+            headers=headers,
+        )
+        return SyncUserResponse.from_dict(res)
+
+    def update_group(self, internal_id: int, group: Group, update_mask: str) -> Group:
+        """Updates the group with the given internal ID in the Databricks account.
+
+        :param internal_id: int
+          Required. Internal ID of the group in Databricks.
+        :param group: :class:`Group`
+          Required. Group to be updated.
+        :param update_mask: str
+          Optional. The list of fields to update.
+
+        :returns: :class:`Group`
+        """
+        body = group.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/identity/accounts/{self._api.account_id}/groups/{internal_id}",
+            query=query,
+            body=body,
+            headers=headers,
+        )
+        return Group.from_dict(res)
+
+    def update_service_principal(
+        self, internal_id: int, service_principal: ServicePrincipal, update_mask: str
+    ) -> ServicePrincipal:
+        """Updates the service principal with the given internal ID in the Databricks account.
+
+        :param internal_id: int
+          Required.
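The sync methods resolve-or-create a principal by its IdP identifier and may serve a cached result for up to 30 minutes; a sketch for `sync_user`, with a hypothetical external ID:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # external_id is the user's identifier in the customer's IdP (value below
    # is made up). A repeat call within ~30 minutes may return the cached user
    # rather than re-syncing from the IdP.
    resp = a.iam_v2.sync_user(external_id="8f3a2b10-1234-5678-9abc-def012345678")
    user = resp.user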
Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service Principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return ServicePrincipal.from_dict(res) + + def update_user(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + """ + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/users/{internal_id}", + query=query, + body=body, + headers=headers, + ) + return User.from_dict(res) + + def update_workspace_access_detail( + self, workspace_id: int, principal_id: int, workspace_access_detail: WorkspaceAccessDetail, update_mask: str + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param workspace_id: int + Required. The workspace ID for which the workspace access detail is being updated. + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/identity/accounts/{self._api.account_id}/workspaces/{workspace_id}/workspaceAccessDetails/{principal_id}", + query=query, + body=body, + headers=headers, + ) + return WorkspaceAccessDetail.from_dict(res) + + +class WorkspaceIamV2API: + """These APIs are used to manage identities and the workspace access of these identities in .""" + + def __init__(self, api_client): + self._api = api_client + + def create_group_proxy(self, group: Group) -> Group: + """TODO: Write description later when this method is implemented + + :param group: :class:`Group` + Required. Group to be created in + + :returns: :class:`Group` + """ + body = group.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/groups", body=body, headers=headers) + return Group.from_dict(res) + + def create_service_principal_proxy(self, service_principal: ServicePrincipal) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param service_principal: :class:`ServicePrincipal` + Required. 
Service principal to be created in + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/servicePrincipals", body=body, headers=headers) + return ServicePrincipal.from_dict(res) + + def create_user_proxy(self, user: User) -> User: + """TODO: Write description later when this method is implemented + + :param user: :class:`User` + Required. User to be created in + + :returns: :class:`User` + """ + body = user.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/users", body=body, headers=headers) + return User.from_dict(res) + + def create_workspace_access_detail_local( + self, workspace_access_detail: WorkspaceAccessDetail + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. Workspace access detail to be created in . + + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/workspaceAccessDetails", body=body, headers=headers) + return WorkspaceAccessDetail.from_dict(res) + + def delete_group_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + + def delete_service_principal_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers) + + def delete_user_proxy(self, internal_id: int): + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the user in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/users/{internal_id}", headers=headers) + + def delete_workspace_access_detail_local(self, principal_id: int): + """TODO: Write description later when this method is implemented + + :param principal_id: int + Required. ID of the principal in Databricks. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", headers=headers) + + def get_group_proxy(self, internal_id: int) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/identity/groups/{internal_id}", headers=headers) + return Group.from_dict(res) + + def get_service_principal_proxy(self, internal_id: int) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. 
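The workspace-level `*_proxy` methods mirror the account-level CRUD but are served from the workspace itself; a sketch of a read-modify-write with `update_mask`, assuming the service hangs off `WorkspaceClient` as `w.iam_v2` and that `Group` has a `display_name` field:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fetch the group, change one field, then patch only that field.
    group = w.iam_v2.get_group_proxy(internal_id=101)  # example internal ID
    group.display_name = "platform-engineers"          # field name assumed
    w.iam_v2.update_group_proxy(internal_id=101, group=group, update_mask="display_name")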
+
+        :returns: :class:`ServicePrincipal`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/identity/servicePrincipals/{internal_id}", headers=headers)
+        return ServicePrincipal.from_dict(res)
+
+    def get_user_proxy(self, internal_id: int) -> User:
+        """Gets the user with the given internal ID in Databricks.
+
+        :param internal_id: int
+          Required. Internal ID of the user in Databricks.
+
+        :returns: :class:`User`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/identity/users/{internal_id}", headers=headers)
+        return User.from_dict(res)
+
+    def get_workspace_access_detail_local(
+        self, principal_id: int, *, view: Optional[WorkspaceAccessDetailView] = None
+    ) -> WorkspaceAccessDetail:
+        """Returns the access details for a principal in the current workspace. Allows for checking access
+        details for any provisioned principal (user, service principal, or group) in the current workspace. *
+        Provisioned principal here refers to one that has been synced into Databricks from the customer's IdP
+        or added explicitly to Databricks via SCIM/UI. Allows for passing in a "view" parameter to control
+        what fields are returned (BASIC by default or FULL).
+
+        :param principal_id: int
+          Required. The internal ID of the principal (user/sp/group) for which the access details are being
+          requested.
+        :param view: :class:`WorkspaceAccessDetailView` (optional)
+          Controls what fields are returned.
+
+        :returns: :class:`WorkspaceAccessDetail`
+        """
+
+        query = {}
+        if view is not None:
+            query["view"] = view.value
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, headers=headers
+        )
+        return WorkspaceAccessDetail.from_dict(res)
+
+    def list_groups_proxy(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> ListGroupsResponse:
+        """Lists groups in Databricks, with pagination.
+
+        :param page_size: int (optional)
+          The maximum number of groups to return. The service may return fewer than this value.
+        :param page_token: str (optional)
+          A page token, received from a previous ListGroups call. Provide this to retrieve the subsequent
+          page.
+
+        :returns: :class:`ListGroupsResponse`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/identity/groups", query=query, headers=headers)
+        return ListGroupsResponse.from_dict(res)
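`get_workspace_access_detail_local` is the same access check scoped to the calling workspace, so no `workspace_id` is needed; a sketch under the same accessor assumption:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.iamv2 import WorkspaceAccessDetailView

    w = WorkspaceClient()

    # Inspect what access principal 42 has in the current workspace.
    detail = w.iam_v2.get_workspace_access_detail_local(
        principal_id=42,  # example internal principal ID
        view=WorkspaceAccessDetailView.FULL,
    )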
+
+    def list_service_principals_proxy(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> ListServicePrincipalsResponse:
+        """Lists service principals in Databricks, with pagination.
+
+        :param page_size: int (optional)
+          The maximum number of service principals to return. The service may return fewer than this value.
+        :param page_token: str (optional)
+          A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the
+          subsequent page.
+
+        :returns: :class:`ListServicePrincipalsResponse`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/identity/servicePrincipals", query=query, headers=headers)
+        return ListServicePrincipalsResponse.from_dict(res)
+
+    def list_users_proxy(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> ListUsersResponse:
+        """Lists users in Databricks, with pagination.
+
+        :param page_size: int (optional)
+          The maximum number of users to return. The service may return fewer than this value.
+        :param page_token: str (optional)
+          A page token, received from a previous ListUsers call. Provide this to retrieve the subsequent page.
+
+        :returns: :class:`ListUsersResponse`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/identity/users", query=query, headers=headers)
+        return ListUsersResponse.from_dict(res)
+
+    def list_workspace_access_details_local(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> ListWorkspaceAccessDetailsResponse:
+        """Lists workspace access details for principals in the current workspace, with pagination.
+
+        :param page_size: int (optional)
+          The maximum number of workspace access details to return. The service may return fewer than this
+          value.
+        :param page_token: str (optional)
+          A page token, received from a previous ListWorkspaceAccessDetails call. Provide this to retrieve the
+          subsequent page.
+
+        :returns: :class:`ListWorkspaceAccessDetailsResponse`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/identity/workspaceAccessDetails", query=query, headers=headers)
+        return ListWorkspaceAccessDetailsResponse.from_dict(res)
+
+    def sync_group_proxy(self, external_id: str) -> SyncGroupResponse:
+        """Syncs a group with the given external ID from the customer's IdP. If the group does not exist, it will
+        be created in the account. If the customer is not onboarded onto Automatic Identity Management (AIM),
+        this will return an error. Synced information is cached for 30 minutes, so subsequent calls to this
+        method will not result in a full sync unless the cache is stale. If this is triggered while the cache
+        is still valid, it will return the cached group information.
+
+        :param external_id: str
+          Required. The external ID of the group in the customer's IdP.
+
+        :returns: :class:`SyncGroupResponse`
+        """
+        body = {}
+        if external_id is not None:
+            body["external_id"] = external_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/identity/groups/syncByExternalId", body=body, headers=headers)
+        return SyncGroupResponse.from_dict(res)
+
+    def sync_service_principal_proxy(self, external_id: str) -> SyncServicePrincipalResponse:
+        """Syncs a service principal with the given external ID from the customer's IdP. If the service
+        principal does not exist, it will be created. If the customer is not onboarded onto Automatic
+        Identity Management (AIM), this will return an error.
Synced information is cached for 30 minutes, so subsequent calls to this method will not + result in a full sync unless the cache is stale. If this is triggered while the cache is still valid, + it will return the cached SP information. + + :param external_id: str + Required. The external ID of the service principal in the customer's IdP. + + :returns: :class:`SyncServicePrincipalResponse` + """ + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/servicePrincipals/syncByExternalId", body=body, headers=headers) + return SyncServicePrincipalResponse.from_dict(res) + + def sync_user_proxy(self, external_id: str) -> SyncUserResponse: + """Syncs a user with the given external ID from the customer's IdP. If the user does not exist, it will + be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. Synced information is cached for 30 minutes, so subsequent calls to this method will + not result in a full sync unless the cache is stale. If this is triggered while the cache is still + valid, it will return the cached user information. + + :param external_id: str + Required. The external ID of the user in the customer's IdP. + + :returns: :class:`SyncUserResponse` + """ + body = {} + if external_id is not None: + body["external_id"] = external_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/identity/users/syncByExternalId", body=body, headers=headers) + return SyncUserResponse.from_dict(res) + + def update_group_proxy(self, internal_id: int, group: Group, update_mask: str) -> Group: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the group in Databricks. + :param group: :class:`Group` + Required. Group to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`Group` + """ + body = group.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/identity/groups/{internal_id}", query=query, body=body, headers=headers) + return Group.from_dict(res) + + def update_service_principal_proxy( + self, internal_id: int, service_principal: ServicePrincipal, update_mask: str + ) -> ServicePrincipal: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. Internal ID of the service principal in Databricks. + :param service_principal: :class:`ServicePrincipal` + Required. Service principal to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`ServicePrincipal` + """ + body = service_principal.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/identity/servicePrincipals/{internal_id}", query=query, body=body, headers=headers + ) + return ServicePrincipal.from_dict(res) + + def update_user_proxy(self, internal_id: int, user: User, update_mask: str) -> User: + """TODO: Write description later when this method is implemented + + :param internal_id: int + Required. 
Internal ID of the user in Databricks. + :param user: :class:`User` + Required. User to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`User` + """ + body = user.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/identity/users/{internal_id}", query=query, body=body, headers=headers) + return User.from_dict(res) + + def update_workspace_access_detail_local( + self, principal_id: int, workspace_access_detail: WorkspaceAccessDetail, update_mask: str + ) -> WorkspaceAccessDetail: + """TODO: Write description later when this method is implemented + + :param principal_id: int + Required. ID of the principal in Databricks. + :param workspace_access_detail: :class:`WorkspaceAccessDetail` + Required. WorkspaceAccessDetail to be updated in + :param update_mask: str + Optional. The list of fields to update. + + :returns: :class:`WorkspaceAccessDetail` + """ + body = workspace_access_detail.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/identity/workspaceAccessDetails/{principal_id}", query=query, body=body, headers=headers + ) + return WorkspaceAccessDetail.from_dict(res) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 9519e8ba7..729942d5c 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -42,9 +42,6 @@ class BaseJob: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" - effective_usage_policy_id: Optional[str] = None - """The id of the usage policy used by this job for cost attribution purposes.""" - has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. 
It is only relevant for API 2.2 @@ -69,8 +66,6 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: @@ -90,8 +85,6 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: @@ -109,7 +102,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseJob: created_time=d.get("created_time", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), - effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), settings=_from_dict(d, "settings", JobSettings), @@ -155,9 +147,6 @@ class BaseRun: `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - effective_usage_policy_id: Optional[str] = None - """The id of the usage policy used by this run for cost attribution purposes.""" - end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" @@ -278,8 +267,6 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -351,8 +338,6 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -418,7 +403,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: creator_user_name=d.get("creator_user_name", None), description=d.get("description", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - effective_usage_policy_id=d.get("effective_usage_policy_id", None), end_time=d.get("end_time", None), execution_duration=d.get("execution_duration", None), git_source=_from_dict(d, "git_source", GitSource), @@ -1651,7 +1635,9 @@ class ExportRunOutput: views: Optional[List[ViewItem]] = None """The exported content in HTML format (one for every view item). To extract the HTML notebook from - the JSON response, download and run this [Python script](/_static/examples/extract.py).""" + the JSON response, download and run this [Python script]. 
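`ExportRunOutput.views` carries one rendered HTML document per exported view; a sketch that writes each view to disk, using a hypothetical run ID (`ViewItem.content` holds the HTML):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    export = w.jobs.export_run(run_id=123456)  # hypothetical run ID
    for i, view in enumerate(export.views or []):
        with open(f"run_view_{i}.html", "w") as f:
            f.write(view.content or "")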
+ + [Python script]: https://docs.databricks.com/en/_static/examples/extract.py""" def as_dict(self) -> dict: """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body.""" @@ -2236,9 +2222,6 @@ class Job: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" - effective_usage_policy_id: Optional[str] = None - """The id of the usage policy used by this job for cost attribution purposes.""" - has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 @@ -2275,8 +2258,6 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: @@ -2300,8 +2281,6 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: @@ -2323,7 +2302,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Job: created_time=d.get("created_time", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), - effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), next_page_token=d.get("next_page_token", None), @@ -3071,8 +3049,8 @@ class JobSettings: usage_policy_id: Optional[str] = None """The id of the user specified usage policy to use for this job. If not specified, a default usage - policy may be applied when creating or modifying the job. See `effective_usage_policy_id` for - the usage policy used by this workload.""" + policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for + the budget policy used by this workload.""" webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" @@ -3528,6 +3506,78 @@ def from_dict(cls, d: Dict[str, Any]) -> ListRunsResponse: ) +@dataclass +class ModelTriggerConfiguration: + condition: ModelTriggerConfigurationCondition + """The condition based on which to trigger a job run.""" + + aliases: Optional[List[str]] = None + """Aliases of the model versions to monitor. Can only be used in conjunction with condition + MODEL_ALIAS_SET.""" + + min_time_between_triggers_seconds: Optional[int] = None + """If set, the trigger starts a run only after the specified amount of time has passed since the + last time the trigger fired. 
The minimum allowed value is 60 seconds.""" + + securable_name: Optional[str] = None + """Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level + triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of + metastore-level triggers.""" + + wait_after_last_change_seconds: Optional[int] = None + """If set, the trigger starts a run only after no model updates have occurred for the specified + time and can be used to wait for a series of model updates before triggering a run. The minimum + allowed value is 60 seconds.""" + + def as_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aliases: + body["aliases"] = [v for v in self.aliases] + if self.condition is not None: + body["condition"] = self.condition.value + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ModelTriggerConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aliases: + body["aliases"] = self.aliases + if self.condition is not None: + body["condition"] = self.condition + if self.min_time_between_triggers_seconds is not None: + body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.wait_after_last_change_seconds is not None: + body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ModelTriggerConfiguration: + """Deserializes the ModelTriggerConfiguration from a dictionary.""" + return cls( + aliases=d.get("aliases", None), + condition=_enum(d, "condition", ModelTriggerConfigurationCondition), + min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None), + securable_name=d.get("securable_name", None), + wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None), + ) + + +class ModelTriggerConfigurationCondition(Enum): + + MODEL_ALIAS_SET = "MODEL_ALIAS_SET" + MODEL_CREATED = "MODEL_CREATED" + MODEL_VERSION_READY = "MODEL_VERSION_READY" + + @dataclass class NotebookOutput: result: Optional[str] = None @@ -4541,9 +4591,6 @@ class Run: `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - effective_usage_policy_id: Optional[str] = None - """The id of the usage policy used by this run for cost attribution purposes.""" - end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). 
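A sketch of the new trigger configuration, built entirely from the types in this hunk: fire when a model version becomes ready, but at most once per hour:

    from databricks.sdk.service import jobs

    model_trigger = jobs.ModelTriggerConfiguration(
        condition=jobs.ModelTriggerConfigurationCondition.MODEL_VERSION_READY,
        securable_name="mycatalog.myschema.mymodel",  # example three-part name
        min_time_between_triggers_seconds=3600,
    )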
This field is set to 0 if the job is still running.""" @@ -4670,8 +4717,6 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -4747,8 +4792,6 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target - if self.effective_usage_policy_id is not None: - body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -4818,7 +4861,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Run: creator_user_name=d.get("creator_user_name", None), description=d.get("description", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - effective_usage_policy_id=d.get("effective_usage_policy_id", None), end_time=d.get("end_time", None), execution_duration=d.get("execution_duration", None), git_source=_from_dict(d, "git_source", GitSource), @@ -5657,7 +5699,7 @@ class RunTask: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. - [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. @@ -5694,6 +5736,9 @@ class RunTask: description: Optional[str] = None """An optional description for this task.""" + disabled: Optional[bool] = None + """Deprecated, field was never used in production.""" + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -5805,9 +5850,21 @@ class RunTask: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. - Databricks recommends using the spark_jar_task instead; see [Spark Submit task for - jobs](/jobs/spark-submit).""" + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. + + In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use + `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark + configurations. + + `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you + _cannot_ specify them in parameters. + + By default, the Spark submit job uses all available memory (excluding reserved memory for + Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value + to leave some room for off-heap usage. 
+ + The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -5856,6 +5913,8 @@ def as_dict(self) -> dict: body["depends_on"] = [v.as_dict() for v in self.depends_on] if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value if self.email_notifications: @@ -5953,6 +6012,8 @@ def as_shallow_dict(self) -> dict: body["depends_on"] = self.depends_on if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target if self.email_notifications: @@ -6040,6 +6101,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTask: dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), + disabled=d.get("disabled", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), end_time=d.get("end_time", None), @@ -6894,7 +6956,7 @@ class SubmitTask: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. - [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the @@ -6981,9 +7043,21 @@ class SubmitTask: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. - Databricks recommends using the spark_jar_task instead; see [Spark Submit task for - jobs](/jobs/spark-submit).""" + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. + + In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use + `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark + configurations. + + `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you + _cannot_ specify them in parameters. + + By default, the Spark submit job uses all available memory (excluding reserved memory for + Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value + to leave some room for off-heap usage. + + The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -7373,7 +7447,7 @@ class Task: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. 
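Given the constraints spelled out in the spark-submit docstring above, a task sketch follows: the cluster spec omits `libraries` and `spark_conf`, and the jar is passed through the task parameters instead (cluster and path values are examples only):

    from databricks.sdk.service import compute, jobs

    task = jobs.SubmitTask(
        task_key="legacy_spark_submit",
        new_cluster=compute.ClusterSpec(
            spark_version="15.4.x-scala2.12",  # example value
            node_type_id="i3.xlarge",          # example value
            num_workers=2,
        ),
        spark_submit_task=jobs.SparkSubmitTask(
            parameters=[
                "--class", "org.apache.spark.examples.SparkPi",
                "dbfs:/FileStore/jars/spark-examples.jar",  # hypothetical DBFS path
                "10",
            ]
        ),
    )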
- [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the @@ -7490,9 +7564,21 @@ class Task: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. - Databricks recommends using the spark_jar_task instead; see [Spark Submit task for - jobs](/jobs/spark-submit).""" + """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + This task can run only on new clusters and is not compatible with serverless compute. + + In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use + `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark + configurations. + + `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you + _cannot_ specify them in parameters. + + By default, the Spark submit job uses all available memory (excluding reserved memory for + Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value + to leave some room for off-heap usage. + + The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -7909,8 +7995,6 @@ class TerminationCodeCode(Enum): run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. - * `BREAKING_CHANGE`: Run failed because of an intentional breaking change in Spark, but it will - be retried with a mitigation config. 
[Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" @@ -8030,6 +8114,8 @@ class TriggerSettings: file_arrival: Optional[FileArrivalTriggerConfiguration] = None """File arrival trigger settings.""" + model: Optional[ModelTriggerConfiguration] = None + pause_status: Optional[PauseStatus] = None """Whether this trigger is paused or not.""" @@ -8046,6 +8132,8 @@ def as_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival.as_dict() + if self.model: + body["model"] = self.model.as_dict() if self.pause_status is not None: body["pause_status"] = self.pause_status.value if self.periodic: @@ -8061,6 +8149,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.file_arrival: body["file_arrival"] = self.file_arrival + if self.model: + body["model"] = self.model if self.pause_status is not None: body["pause_status"] = self.pause_status if self.periodic: @@ -8076,6 +8166,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: """Deserializes the TriggerSettings from a dictionary.""" return cls( file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration), + model=_from_dict(d, "model", ModelTriggerConfiguration), pause_status=_enum(d, "pause_status", PauseStatus), periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), table=_from_dict(d, "table", TableUpdateTriggerConfiguration), @@ -8561,8 +8652,8 @@ def create( `runNow`. :param usage_policy_id: str (optional) The id of the user specified usage policy to use for this job. If not specified, a default usage - policy may be applied when creating or modifying the job. See `effective_usage_policy_id` for the - usage policy used by this workload. + policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the + budget policy used by this workload. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 6a132360c..e83b3dd6f 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -512,30 +512,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateWebhookResponse: return cls(webhook=_from_dict(d, "webhook", RegistryWebhook)) -@dataclass -class DataSource: - delta_table_source: Optional[DeltaTableSource] = None - - def as_dict(self) -> dict: - """Serializes the DataSource into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.delta_table_source: - body["delta_table_source"] = self.delta_table_source.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DataSource into a shallow dictionary of its immediate attributes.""" - body = {} - if self.delta_table_source: - body["delta_table_source"] = self.delta_table_source - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DataSource: - """Deserializes the DataSource from a dictionary.""" - return cls(delta_table_source=_from_dict(d, "delta_table_source", DeltaTableSource)) - - @dataclass class Dataset: """Dataset. 
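With the new `model` slot on `TriggerSettings`, the model trigger plugs into a job like any other trigger; a sketch that fires when the "champion" alias is set on the monitored model:

    from databricks.sdk.service import jobs

    trigger = jobs.TriggerSettings(
        model=jobs.ModelTriggerConfiguration(
            condition=jobs.ModelTriggerConfigurationCondition.MODEL_ALIAS_SET,
            aliases=["champion"],
            securable_name="mycatalog.myschema.mymodel",  # example three-part name
        ),
        pause_status=jobs.PauseStatus.UNPAUSED,
    )
    # Pass trigger= to JobsAPI.create(...) or include it in JobSettings on update.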
Represents a reference to data used for training, testing, or evaluation during the @@ -892,49 +868,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteWebhookResponse: return cls() -@dataclass -class DeltaTableSource: - full_name: str - """The full three-part (catalog, schema, table) name of the Delta table.""" - - entity_columns: List[str] - """The entity columns of the Delta table.""" - - timeseries_column: str - """The timeseries column of the Delta table.""" - - def as_dict(self) -> dict: - """Serializes the DeltaTableSource into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.entity_columns: - body["entity_columns"] = [v for v in self.entity_columns] - if self.full_name is not None: - body["full_name"] = self.full_name - if self.timeseries_column is not None: - body["timeseries_column"] = self.timeseries_column - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeltaTableSource into a shallow dictionary of its immediate attributes.""" - body = {} - if self.entity_columns: - body["entity_columns"] = self.entity_columns - if self.full_name is not None: - body["full_name"] = self.full_name - if self.timeseries_column is not None: - body["timeseries_column"] = self.timeseries_column - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSource: - """Deserializes the DeltaTableSource from a dictionary.""" - return cls( - entity_columns=d.get("entity_columns", None), - full_name=d.get("full_name", None), - timeseries_column=d.get("timeseries_column", None), - ) - - @dataclass class Experiment: """An experiment and its metadata.""" @@ -1279,68 +1212,46 @@ def from_dict(cls, d: Dict[str, Any]) -> ExperimentTag: @dataclass class Feature: - full_name: str - """The full three-part name (catalog, schema, name) of the feature.""" - - source: DataSource - """The data source of the feature.""" - - inputs: List[str] - """The input columns from which the feature is computed.""" + """Feature for model version.""" - function: Function - """The function by which the feature is computed.""" + feature_name: Optional[str] = None + """Feature name""" - time_window: TimeWindow - """The time window in which the feature is computed.""" + feature_table_id: Optional[str] = None + """Feature table id""" - description: Optional[str] = None - """The description of the feature.""" + feature_table_name: Optional[str] = None + """Feature table name""" def as_dict(self) -> dict: """Serializes the Feature into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function: - body["function"] = self.function.as_dict() - if self.inputs: - body["inputs"] = [v for v in self.inputs] - if self.source: - body["source"] = self.source.as_dict() - if self.time_window: - body["time_window"] = self.time_window.as_dict() + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name return body def as_shallow_dict(self) -> dict: """Serializes the Feature into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function: - 
body["function"] = self.function - if self.inputs: - body["inputs"] = self.inputs - if self.source: - body["source"] = self.source - if self.time_window: - body["time_window"] = self.time_window + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Feature: """Deserializes the Feature from a dictionary.""" return cls( - description=d.get("description", None), - full_name=d.get("full_name", None), - function=_from_dict(d, "function", Function), - inputs=d.get("inputs", None), - source=_from_dict(d, "source", DataSource), - time_window=_from_dict(d, "time_window", TimeWindow), + feature_name=d.get("feature_name", None), + feature_table_id=d.get("feature_table_id", None), + feature_table_name=d.get("feature_table_name", None), ) @@ -1480,7 +1391,7 @@ def from_dict(cls, d: Dict[str, Any]) -> FeatureLineageOnlineFeature: class FeatureList: """Feature list wrap all the features for a model version""" - features: Optional[List[LinkedFeature]] = None + features: Optional[List[Feature]] = None def as_dict(self) -> dict: """Serializes the FeatureList into a dictionary suitable for use as a JSON request body.""" @@ -1499,7 +1410,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> FeatureList: """Deserializes the FeatureList from a dictionary.""" - return cls(features=_repeated_dict(d, "features", LinkedFeature)) + return cls(features=_repeated_dict(d, "features", Feature)) @dataclass @@ -1654,90 +1565,6 @@ class ForecastingExperimentState(Enum): SUCCEEDED = "SUCCEEDED" -@dataclass -class Function: - function_type: FunctionFunctionType - """The type of the function.""" - - extra_parameters: Optional[List[FunctionExtraParameter]] = None - """Extra parameters for parameterized functions.""" - - def as_dict(self) -> dict: - """Serializes the Function into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.extra_parameters: - body["extra_parameters"] = [v.as_dict() for v in self.extra_parameters] - if self.function_type is not None: - body["function_type"] = self.function_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Function into a shallow dictionary of its immediate attributes.""" - body = {} - if self.extra_parameters: - body["extra_parameters"] = self.extra_parameters - if self.function_type is not None: - body["function_type"] = self.function_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Function: - """Deserializes the Function from a dictionary.""" - return cls( - extra_parameters=_repeated_dict(d, "extra_parameters", FunctionExtraParameter), - function_type=_enum(d, "function_type", FunctionFunctionType), - ) - - -@dataclass -class FunctionExtraParameter: - key: str - """The name of the parameter.""" - - value: str - """The value of the parameter.""" - - def as_dict(self) -> dict: - """Serializes the FunctionExtraParameter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the FunctionExtraParameter into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not 
None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionExtraParameter: - """Deserializes the FunctionExtraParameter from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) - - -class FunctionFunctionType(Enum): - - APPROX_COUNT_DISTINCT = "APPROX_COUNT_DISTINCT" - APPROX_PERCENTILE = "APPROX_PERCENTILE" - AVG = "AVG" - COUNT = "COUNT" - FIRST = "FIRST" - LAST = "LAST" - MAX = "MAX" - MIN = "MIN" - STDDEV_POP = "STDDEV_POP" - STDDEV_SAMP = "STDDEV_SAMP" - SUM = "SUM" - VAR_POP = "VAR_POP" - VAR_SAMP = "VAR_SAMP" - - @dataclass class GetExperimentByNameResponse: experiment: Optional[Experiment] = None @@ -1864,6 +1691,31 @@ def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) +@dataclass +class GetLoggedModelsRequestResponse: + models: Optional[List[LoggedModel]] = None + """The retrieved logged models.""" + + def as_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.models: + body["models"] = [v.as_dict() for v in self.models] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetLoggedModelsRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.models: + body["models"] = self.models + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelsRequestResponse: + """Deserializes the GetLoggedModelsRequestResponse from a dictionary.""" + return cls(models=_repeated_dict(d, "models", LoggedModel)) + + @dataclass class GetMetricHistoryResponse: metrics: Optional[List[Metric]] = None @@ -2227,51 +2079,6 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSpecWithoutSecret: return cls(job_id=d.get("job_id", None), workspace_url=d.get("workspace_url", None)) -@dataclass -class LinkedFeature: - """Feature for model version. 
([ML-57150] Renamed from Feature to LinkedFeature)""" - - feature_name: Optional[str] = None - """Feature name""" - - feature_table_id: Optional[str] = None - """Feature table id""" - - feature_table_name: Optional[str] = None - """Feature table name""" - - def as_dict(self) -> dict: - """Serializes the LinkedFeature into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.feature_name is not None: - body["feature_name"] = self.feature_name - if self.feature_table_id is not None: - body["feature_table_id"] = self.feature_table_id - if self.feature_table_name is not None: - body["feature_table_name"] = self.feature_table_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LinkedFeature into a shallow dictionary of its immediate attributes.""" - body = {} - if self.feature_name is not None: - body["feature_name"] = self.feature_name - if self.feature_table_id is not None: - body["feature_table_id"] = self.feature_table_id - if self.feature_table_name is not None: - body["feature_table_name"] = self.feature_table_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LinkedFeature: - """Deserializes the LinkedFeature from a dictionary.""" - return cls( - feature_name=d.get("feature_name", None), - feature_table_id=d.get("feature_table_id", None), - feature_table_name=d.get("feature_table_name", None), - ) - - @dataclass class ListArtifactsResponse: files: Optional[List[FileInfo]] = None @@ -2385,38 +2192,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListFeatureTagsResponse: ) -@dataclass -class ListFeaturesResponse: - features: Optional[List[Feature]] = None - """List of features.""" - - next_page_token: Optional[str] = None - """Pagination token to request the next page of results for this query.""" - - def as_dict(self) -> dict: - """Serializes the ListFeaturesResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.features: - body["features"] = [v.as_dict() for v in self.features] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListFeaturesResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.features: - body["features"] = self.features - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListFeaturesResponse: - """Deserializes the ListFeaturesResponse from a dictionary.""" - return cls(features=_repeated_dict(d, "features", Feature), next_page_token=d.get("next_page_token", None)) - - @dataclass class ListModelsResponse: next_page_token: Optional[str] = None @@ -3744,8 +3519,10 @@ class PublishSpec: online_table_name: str """The full three-part (catalog, schema, table) name of the online table.""" - publish_mode: PublishSpecPublishMode - """The publish mode of the pipeline that syncs the online table with the source table.""" + publish_mode: Optional[PublishSpecPublishMode] = None + """The publish mode of the pipeline that syncs the online table with the source table. Defaults to + TRIGGERED if not specified. 
All publish modes require the source table to have Change Data Feed
+    (CDF) enabled."""
 
     def as_dict(self) -> dict:
         """Serializes the PublishSpec into a dictionary suitable for use as a JSON request body."""
@@ -3782,7 +3559,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishSpec:
 class PublishSpecPublishMode(Enum):
 
     CONTINUOUS = "CONTINUOUS"
-    SNAPSHOT = "SNAPSHOT"
     TRIGGERED = "TRIGGERED"
 
@@ -4984,38 +4760,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse:
     return cls(body=d.get("body", None), status_code=d.get("status_code", None))
 
-@dataclass
-class TimeWindow:
-    duration: str
-    """The duration of the time window."""
-
-    offset: Optional[str] = None
-    """The offset of the time window."""
-
-    def as_dict(self) -> dict:
-        """Serializes the TimeWindow into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.duration is not None:
-            body["duration"] = self.duration
-        if self.offset is not None:
-            body["offset"] = self.offset
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the TimeWindow into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.duration is not None:
-            body["duration"] = self.duration
-        if self.offset is not None:
-            body["offset"] = self.offset
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> TimeWindow:
-        """Deserializes the TimeWindow from a dictionary."""
-        return cls(duration=d.get("duration", None), offset=d.get("offset", None))
-
-
 @dataclass
 class TransitionRequest:
     """For activities, this contains the activity recorded for the action. For comments, this contains
@@ -5670,6 +5414,25 @@ def get_logged_model(self, model_id: str) -> GetLoggedModelResponse:
         res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers)
         return GetLoggedModelResponse.from_dict(res)
 
+    def get_logged_models(self, *, model_ids: Optional[List[str]] = None) -> GetLoggedModelsRequestResponse:
+        """Batch endpoint for getting logged models from a list of model IDs.
+
+        :param model_ids: List[str] (optional)
+          The IDs of the logged models to retrieve. At most 100 IDs may be supplied per call.
+
+        :returns: :class:`GetLoggedModelsRequestResponse`
+        """
+
+        query = {}
+        if model_ids is not None:
+            query["model_ids"] = [v for v in model_ids]
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/mlflow/logged-models:batchGet", query=query, headers=headers)
+        return GetLoggedModelsRequestResponse.from_dict(res)
+
     def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse:
         """Gets the permission levels that a user can have on an object.
 
@@ -6538,116 +6301,6 @@ def update_run(
         return UpdateRunResponse.from_dict(res)
 
-class FeatureEngineeringAPI:
-    """[description]"""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def create_feature(self, feature: Feature) -> Feature:
-        """Create a Feature.
-
-        :param feature: :class:`Feature`
-          Feature to create.
-
-        :returns: :class:`Feature`
-        """
-        body = feature.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/feature-engineering/features", body=body, headers=headers)
-        return Feature.from_dict(res)
-
-    def delete_feature(self, full_name: str):
-        """Delete a Feature.
-
-        :param full_name: str
-          Name of the feature to delete.
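A hedged illustration of the new batch endpoint above: fetching several logged models in one call through a workspace client. The model IDs are placeholders, and this assumes the method is exposed on the experiments client (`w.experiments`), consistent with the surrounding hunk.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Batch-fetch logged models by ID; at most 100 IDs per call (IDs are illustrative).
    response = w.experiments.get_logged_models(model_ids=["m-abc123", "m-def456"])
    for model in response.models or []:
        print(model)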
- - - """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers) - - def get_feature(self, full_name: str) -> Feature: - """Get a Feature. - - :param full_name: str - Name of the feature to get. - - :returns: :class:`Feature` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers) - return Feature.from_dict(res) - - def list_features(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Feature]: - """List Features. - - :param page_size: int (optional) - The maximum number of results to return. - :param page_token: str (optional) - Pagination token to go to the next page based on a previous query. - - :returns: Iterator over :class:`Feature` - """ - - query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do("GET", "/api/2.0/feature-engineering/features", query=query, headers=headers) - if "features" in json: - for v in json["features"]: - yield Feature.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_feature(self, full_name: str, feature: Feature, update_mask: str) -> Feature: - """Update a Feature. - - :param full_name: str - The full three-part name (catalog, schema, name) of the feature. - :param feature: :class:`Feature` - Feature to update. - :param update_mask: str - The list of fields to update. - - :returns: :class:`Feature` - """ - body = feature.as_dict() - query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/feature-engineering/features/{full_name}", query=query, body=body, headers=headers - ) - return Feature.from_dict(res) - - class FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features. Using a feature store also ensures that the code used to compute feature values is the same during model diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 58c57808d..148374800 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -232,11 +232,11 @@ class FederationPolicy: oidc_policy: Optional[OidcFederationPolicy] = None policy_id: Optional[str] = None - """The ID of the federation policy. Output only.""" + """The ID of the federation policy.""" service_principal_id: Optional[int] = None - """The service principal ID that this federation policy applies to. Output only. Only set for - service principal federation policies.""" + """The service principal ID that this federation policy applies to. 
Only set for service principal
+    federation policies."""
 
     uid: Optional[str] = None
     """Unique, immutable id of the federation policy."""
@@ -863,17 +863,27 @@ def from_dict(cls, d: Dict[str, Any]) -> SecretInfo:
 
 @dataclass
 class TokenAccessPolicy:
+    absolute_session_lifetime_in_minutes: Optional[int] = None
+    """Absolute OAuth session TTL in minutes, applied when single-use refresh tokens are enabled."""
+
     access_token_ttl_in_minutes: Optional[int] = None
     """access token time to live in minutes"""
 
+    enable_single_use_refresh_tokens: Optional[bool] = None
+    """Whether to enable single-use refresh tokens."""
+
     refresh_token_ttl_in_minutes: Optional[int] = None
     """refresh token time to live in minutes"""
 
     def as_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.absolute_session_lifetime_in_minutes is not None:
+            body["absolute_session_lifetime_in_minutes"] = self.absolute_session_lifetime_in_minutes
         if self.access_token_ttl_in_minutes is not None:
             body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
+        if self.enable_single_use_refresh_tokens is not None:
+            body["enable_single_use_refresh_tokens"] = self.enable_single_use_refresh_tokens
         if self.refresh_token_ttl_in_minutes is not None:
             body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body
@@ -881,8 +891,12 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.absolute_session_lifetime_in_minutes is not None:
+            body["absolute_session_lifetime_in_minutes"] = self.absolute_session_lifetime_in_minutes
         if self.access_token_ttl_in_minutes is not None:
             body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
+        if self.enable_single_use_refresh_tokens is not None:
+            body["enable_single_use_refresh_tokens"] = self.enable_single_use_refresh_tokens
         if self.refresh_token_ttl_in_minutes is not None:
             body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body
@@ -891,7 +905,9 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy:
         """Deserializes the TokenAccessPolicy from a dictionary."""
         return cls(
+            absolute_session_lifetime_in_minutes=d.get("absolute_session_lifetime_in_minutes", None),
             access_token_ttl_in_minutes=d.get("access_token_ttl_in_minutes", None),
+            enable_single_use_refresh_tokens=d.get("enable_single_use_refresh_tokens", None),
             refresh_token_ttl_in_minutes=d.get("refresh_token_ttl_in_minutes", None),
         )
 
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index ff309b6ae..d723956e0 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -21,6 +21,33 @@
 # all definitions in this file are in alphabetical order
 
+@dataclass
+class ConnectionParameters:
+    source_catalog: Optional[str] = None
+    """Source catalog for initial connection. This is necessary for schema exploration in some database
+    systems like Oracle, and optional but recommended in other database systems, such as Postgres.
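A hedged sketch of the new `ConnectionParameters` dataclass defined in this hunk; the Oracle service name below is a placeholder, not part of the patch.

    from databricks.sdk.service.pipelines import ConnectionParameters

    # For Oracle sources the catalog maps to a service name; "ORCLPDB1" is a placeholder.
    params = ConnectionParameters(source_catalog="ORCLPDB1")

    # The generated as_dict/from_dict helpers round-trip the payload.
    assert ConnectionParameters.from_dict(params.as_dict()).source_catalog == "ORCLPDB1"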
+ For Oracle databases, this maps to a service name.""" + + def as_dict(self) -> dict: + """Serializes the ConnectionParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectionParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConnectionParameters: + """Deserializes the ConnectionParameters from a dictionary.""" + return cls(source_catalog=d.get("source_catalog", None)) + + @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -553,6 +580,9 @@ class IngestionGatewayPipelineDefinition: """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" + connection_parameters: Optional[ConnectionParameters] = None + """Optional, Internal. Parameters required to establish an initial connection with the source.""" + gateway_storage_name: Optional[str] = None """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will @@ -565,6 +595,8 @@ def as_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters.as_dict() if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -580,6 +612,8 @@ def as_shallow_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -594,6 +628,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionGatewayPipelineDefinition: return cls( connection_id=d.get("connection_id", None), connection_name=d.get("connection_name", None), + connection_parameters=_from_dict(d, "connection_parameters", ConnectionParameters), gateway_storage_catalog=d.get("gateway_storage_catalog", None), gateway_storage_name=d.get("gateway_storage_name", None), gateway_storage_schema=d.get("gateway_storage_schema", None), @@ -606,14 +641,15 @@ class IngestionPipelineDefinition: """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" + ingest_from_uc_foreign_catalog: Optional[bool] = None + """Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs directly + without the need to specify a UC connection or ingestion gateway. The `source_catalog` fields in + objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.""" + ingestion_gateway_id: Optional[str] = None """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. 
This is used with connectors to databases like SQL Server.""" - netsuite_jar_path: Optional[str] = None - """Netsuite only configuration. When the field is set for a netsuite connector, the jar stored in - the field will be validated and added to the classpath of pipeline's cluster.""" - objects: Optional[List[IngestionConfig]] = None """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" @@ -633,10 +669,10 @@ def as_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id - if self.netsuite_jar_path is not None: - body["netsuite_jar_path"] = self.netsuite_jar_path if self.objects: body["objects"] = [v.as_dict() for v in self.objects] if self.source_configurations: @@ -652,10 +688,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id - if self.netsuite_jar_path is not None: - body["netsuite_jar_path"] = self.netsuite_jar_path if self.objects: body["objects"] = self.objects if self.source_configurations: @@ -671,8 +707,8 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" return cls( connection_name=d.get("connection_name", None), + ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None), ingestion_gateway_id=d.get("ingestion_gateway_id", None), - netsuite_jar_path=d.get("netsuite_jar_path", None), objects=_repeated_dict(d, "objects", IngestionConfig), source_configurations=_repeated_dict(d, "source_configurations", SourceConfig), source_type=_enum(d, "source_type", IngestionSourceType), @@ -739,97 +775,11 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionTableSpecifi ) -@dataclass -class IngestionPipelineDefinitionWorkdayReportParameters: - incremental: Optional[bool] = None - """(Optional) Marks the report as incremental. This field is deprecated and should not be used. Use - `parameters` instead. The incremental behavior is now controlled by the `parameters` field.""" - - parameters: Optional[Dict[str, str]] = None - """Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date", - "end_date"), and the corresponding value is a SQL-like expression used to compute the parameter - value at runtime. Example: { "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\")) - }", "end_date": "{ current_date() - INTERVAL 1 DAY }" }""" - - report_parameters: Optional[List[IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue]] = None - """(Optional) Additional custom parameters for Workday Report This field is deprecated and should - not be used. 
Use `parameters` instead.""" - - def as_dict(self) -> dict: - """Serializes the IngestionPipelineDefinitionWorkdayReportParameters into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.incremental is not None: - body["incremental"] = self.incremental - if self.parameters: - body["parameters"] = self.parameters - if self.report_parameters: - body["report_parameters"] = [v.as_dict() for v in self.report_parameters] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the IngestionPipelineDefinitionWorkdayReportParameters into a shallow dictionary of its immediate attributes.""" - body = {} - if self.incremental is not None: - body["incremental"] = self.incremental - if self.parameters: - body["parameters"] = self.parameters - if self.report_parameters: - body["report_parameters"] = self.report_parameters - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayReportParameters: - """Deserializes the IngestionPipelineDefinitionWorkdayReportParameters from a dictionary.""" - return cls( - incremental=d.get("incremental", None), - parameters=d.get("parameters", None), - report_parameters=_repeated_dict( - d, "report_parameters", IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue - ), - ) - - -@dataclass -class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: - key: Optional[str] = None - """Key for the report parameter, can be a column name or other metadata""" - - value: Optional[str] = None - """Value for the report parameter. Possible values it can take are these sql functions: 1. - coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed - date, else current_offset() 2. current_date() 3. date_sub(current_date(), x) -> subtract x (some - non-negative integer) days from current date""" - - def as_dict(self) -> dict: - """Serializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: - """Deserializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) - - class IngestionSourceType(Enum): BIGQUERY = "BIGQUERY" CONFLUENCE = "CONFLUENCE" DYNAMICS365 = "DYNAMICS365" - FOREIGN_CATALOG = "FOREIGN_CATALOG" GA4_RAW_DATA = "GA4_RAW_DATA" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" META_MARKETING = "META_MARKETING" @@ -2510,6 +2460,24 @@ def from_dict(cls, d: Dict[str, Any]) -> RestartWindow: ) +@dataclass +class RestorePipelineRequestResponse: + def as_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestorePipelineRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + 
def from_dict(cls, d: Dict[str, Any]) -> RestorePipelineRequestResponse:
+        """Deserializes the RestorePipelineRequestResponse from a dictionary."""
+        return cls()
+
 
 @dataclass
 class RunAs:
     """Write-only setting, available only in Create/Update calls. Specifies the user or service
@@ -2955,6 +2923,10 @@ class TableSpecificConfig:
         None
     )
 
+    row_filter: Optional[str] = None
+    """(Optional, Immutable) The row filter condition to be applied to the table. It must not contain
+    the WHERE keyword, only the actual filter condition. It must be in DBSQL format."""
+
     salesforce_include_formula_fields: Optional[bool] = None
     """If true, formula fields defined in the table are included in the ingestion. This setting is only
     valid for the Salesforce connector"""
@@ -2966,9 +2938,6 @@ class TableSpecificConfig:
     """The column names specifying the logical order of events in the source data. Delta Live Tables
     uses this sequencing to handle change events that arrive out of order."""
 
-    workday_report_parameters: Optional[IngestionPipelineDefinitionWorkdayReportParameters] = None
-    """(Optional) Additional custom parameters for Workday Report"""
-
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -2980,14 +2949,14 @@ def as_dict(self) -> dict:
             body["primary_keys"] = [v for v in self.primary_keys]
         if self.query_based_connector_config:
             body["query_based_connector_config"] = self.query_based_connector_config.as_dict()
+        if self.row_filter is not None:
+            body["row_filter"] = self.row_filter
         if self.salesforce_include_formula_fields is not None:
             body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
             body["scd_type"] = self.scd_type.value
         if self.sequence_by:
             body["sequence_by"] = [v for v in self.sequence_by]
-        if self.workday_report_parameters:
-            body["workday_report_parameters"] = self.workday_report_parameters.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -3001,14 +2970,14 @@ def as_shallow_dict(self) -> dict:
             body["primary_keys"] = self.primary_keys
         if self.query_based_connector_config:
             body["query_based_connector_config"] = self.query_based_connector_config
+        if self.row_filter is not None:
+            body["row_filter"] = self.row_filter
         if self.salesforce_include_formula_fields is not None:
             body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
         if self.scd_type is not None:
             body["scd_type"] = self.scd_type
         if self.sequence_by:
             body["sequence_by"] = self.sequence_by
-        if self.workday_report_parameters:
-            body["workday_report_parameters"] = self.workday_report_parameters
         return body
 
     @classmethod
@@ -3023,12 +2992,10 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
                 "query_based_connector_config",
                 IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig,
             ),
+            row_filter=d.get("row_filter", None),
             salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
             scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
             sequence_by=d.get("sequence_by", None),
-            workday_report_parameters=_from_dict(
-                d, "workday_report_parameters", IngestionPipelineDefinitionWorkdayReportParameters
-            ),
         )
 
@@ -3686,6 +3653,23 @@ def list_updates(
         res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers)
         return ListUpdatesResponse.from_dict(res)
 
+    def restore_pipeline(self, pipeline_id: str) -> RestorePipelineRequestResponse:
+        """Restores a pipeline that was
previously deleted, if within the restoration window. All tables + deleted at pipeline deletion will be undropped as well. + + :param pipeline_id: str + The ID of the pipeline to restore + + :returns: :class:`RestorePipelineRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/restore", headers=headers) + return RestorePipelineRequestResponse.from_dict(res) + def set_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index a6fab7023..b507697a9 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -17,6 +17,9 @@ @dataclass class AnomalyDetectionConfig: + job_type: Optional[AnomalyDetectionJobType] = None + """The type of the last run of the workflow.""" + last_run_id: Optional[str] = None """Run id of the last run of the workflow""" @@ -26,6 +29,8 @@ class AnomalyDetectionConfig: def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type.value if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -35,6 +40,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.job_type is not None: + body["job_type"] = self.job_type if self.last_run_id is not None: body["last_run_id"] = self.last_run_id if self.latest_run_status is not None: @@ -45,11 +52,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" return cls( + job_type=_enum(d, "job_type", AnomalyDetectionJobType), last_run_id=d.get("last_run_id", None), latest_run_status=_enum(d, "latest_run_status", AnomalyDetectionRunStatus), ) +class AnomalyDetectionJobType(Enum): + + ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN" + ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL" + + class AnomalyDetectionRunStatus(Enum): """Status of Anomaly Detection Job Run""" diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index f707aadf7..7370c1138 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -3382,9 +3382,6 @@ class ServingEndpoint: task: Optional[str] = None """The task type of the serving endpoint.""" - usage_policy_id: Optional[str] = None - """The usage policy associated with serving endpoint.""" - def as_dict(self) -> dict: """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} @@ -3412,8 +3409,6 @@ def as_dict(self) -> dict: body["tags"] = [v.as_dict() for v in self.tags] if self.task is not None: body["task"] = self.task - if self.usage_policy_id is not None: - body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: @@ -3443,8 +3438,6 @@ def as_shallow_dict(self) -> dict: body["tags"] = self.tags if self.task is not None: body["task"] = self.task - if self.usage_policy_id is not None: - body["usage_policy_id"] = self.usage_policy_id return body @classmethod @@ 
-3463,7 +3456,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ServingEndpoint: state=_from_dict(d, "state", EndpointState), tags=_repeated_dict(d, "tags", EndpointTag), task=d.get("task", None), - usage_policy_id=d.get("usage_policy_id", None), ) @@ -3905,38 +3897,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, "routes", Route)) -@dataclass -class UpdateInferenceEndpointNotificationsResponse: - email_notifications: Optional[EmailNotifications] = None - - name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateInferenceEndpointNotificationsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateInferenceEndpointNotificationsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateInferenceEndpointNotificationsResponse: - """Deserializes the UpdateInferenceEndpointNotificationsResponse from a dictionary.""" - return cls( - email_notifications=_from_dict(d, "email_notifications", EmailNotifications), name=d.get("name", None) - ) - - @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -4522,7 +4482,6 @@ def query( self, name: str, *, - client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4536,15 +4495,11 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, - usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. - :param client_request_id: str (optional) - Optional user-provided request identifier that will be recorded in the inference table and the usage - tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4586,14 +4541,10 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. - :param usage_context: Dict[str,str] (optional) - Optional user-provided context that will be recorded in the usage tracking table. 
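With `client_request_id` and `usage_context` removed from this signature, a hedged sketch of a chat-style call against the updated method; the endpoint name and message are placeholders, while `ChatMessage` and `ChatMessageRole` are existing types in this module.

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

    w = WorkspaceClient()
    # Chat-style query; only fields still present in the updated signature are used.
    response = w.serving_endpoints.query(
        name="my-chat-endpoint",  # placeholder
        messages=[ChatMessage(role=ChatMessageRole.USER, content="Hello!")],
        max_tokens=64,
    )
    print(response.as_dict())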
:returns: :class:`QueryEndpointResponse` """ body = {} - if client_request_id is not None: - body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4620,8 +4571,6 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature - if usage_context is not None: - body["usage_context"] = usage_context headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -4738,30 +4687,6 @@ def update_config_and_wait( traffic_config=traffic_config, ).result(timeout=timeout) - def update_notifications( - self, name: str, *, email_notifications: Optional[EmailNotifications] = None - ) -> UpdateInferenceEndpointNotificationsResponse: - """Updates the email and webhook notification settings for an endpoint. - - :param name: str - The name of the serving endpoint whose notifications are being updated. This field is required. - :param email_notifications: :class:`EmailNotifications` (optional) - The email notification settings to update. Specify email addresses to notify when endpoint state - changes occur. - - :returns: :class:`UpdateInferenceEndpointNotificationsResponse` - """ - body = {} - if email_notifications is not None: - body["email_notifications"] = email_notifications.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/serving-endpoints/{name}/notifications", body=body, headers=headers) - return UpdateInferenceEndpointNotificationsResponse.from_dict(res) - def update_permissions( self, serving_endpoint_id: str, @@ -4860,7 +4785,6 @@ def query( self, name: str, *, - client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4874,15 +4798,11 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, - usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. - :param client_request_id: str (optional) - Optional user-provided request identifier that will be recorded in the inference table and the usage - tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4924,14 +4844,10 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. - :param usage_context: Dict[str,str] (optional) - Optional user-provided context that will be recorded in the usage tracking table. 
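The data-plane `query` mirrors the same signature change; a hedged completions-style sketch, assuming the client is exposed as `w.serving_endpoints_data_plane` and that the full signature's `prompt` field is available (neither is shown in this hunk). Endpoint name and prompt are placeholders.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Completions-style query using the prompt/temperature fields described above.
    response = w.serving_endpoints_data_plane.query(
        name="my-completions-endpoint",  # placeholder
        prompt="Once upon a time",       # placeholder prompt
        temperature=0.2,
        max_tokens=32,
    )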
:returns: :class:`QueryEndpointResponse` """ body = {} - if client_request_id is not None: - body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4958,8 +4874,6 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature - if usage_context is not None: - body["usage_context"] = usage_context data_plane_info = self._data_plane_info_query( name=name, ) diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 3004f17da..975860d8a 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -3587,32 +3587,8 @@ def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredWorkspace: @dataclass class MicrosoftTeamsConfig: - app_id: Optional[str] = None - """[Input-Only] App ID for Microsoft Teams App.""" - - app_id_set: Optional[bool] = None - """[Output-Only] Whether App ID is set.""" - - auth_secret: Optional[str] = None - """[Input-Only] Secret for Microsoft Teams App authentication.""" - - auth_secret_set: Optional[bool] = None - """[Output-Only] Whether secret is set.""" - - channel_url: Optional[str] = None - """[Input-Only] Channel URL for Microsoft Teams App.""" - - channel_url_set: Optional[bool] = None - """[Output-Only] Whether Channel URL is set.""" - - tenant_id: Optional[str] = None - """[Input-Only] Tenant ID for Microsoft Teams App.""" - - tenant_id_set: Optional[bool] = None - """[Output-Only] Whether Tenant ID is set.""" - url: Optional[str] = None - """[Input-Only] URL for Microsoft Teams webhook.""" + """[Input-Only] URL for Microsoft Teams.""" url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" @@ -3620,22 +3596,6 @@ class MicrosoftTeamsConfig: def as_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.app_id_set is not None: - body["app_id_set"] = self.app_id_set - if self.auth_secret is not None: - body["auth_secret"] = self.auth_secret - if self.auth_secret_set is not None: - body["auth_secret_set"] = self.auth_secret_set - if self.channel_url is not None: - body["channel_url"] = self.channel_url - if self.channel_url_set is not None: - body["channel_url_set"] = self.channel_url_set - if self.tenant_id is not None: - body["tenant_id"] = self.tenant_id - if self.tenant_id_set is not None: - body["tenant_id_set"] = self.tenant_id_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -3645,22 +3605,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.app_id_set is not None: - body["app_id_set"] = self.app_id_set - if self.auth_secret is not None: - body["auth_secret"] = self.auth_secret - if self.auth_secret_set is not None: - body["auth_secret_set"] = self.auth_secret_set - if self.channel_url is not None: - body["channel_url"] = self.channel_url - if self.channel_url_set is not None: - body["channel_url_set"] = self.channel_url_set - if self.tenant_id is not None: - body["tenant_id"] = self.tenant_id - if self.tenant_id_set is not None: - body["tenant_id_set"] = self.tenant_id_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ 
-3670,18 +3614,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> MicrosoftTeamsConfig: """Deserializes the MicrosoftTeamsConfig from a dictionary.""" - return cls( - app_id=d.get("app_id", None), - app_id_set=d.get("app_id_set", None), - auth_secret=d.get("auth_secret", None), - auth_secret_set=d.get("auth_secret_set", None), - channel_url=d.get("channel_url", None), - channel_url_set=d.get("channel_url_set", None), - tenant_id=d.get("tenant_id", None), - tenant_id_set=d.get("tenant_id_set", None), - url=d.get("url", None), - url_set=d.get("url_set", None), - ) + return cls(url=d.get("url", None), url_set=d.get("url_set", None)) @dataclass @@ -4684,18 +4617,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: @dataclass class SlackConfig: - channel_id: Optional[str] = None - """[Input-Only] Slack channel ID for notifications.""" - - channel_id_set: Optional[bool] = None - """[Output-Only] Whether channel ID is set.""" - - oauth_token: Optional[str] = None - """[Input-Only] OAuth token for Slack authentication.""" - - oauth_token_set: Optional[bool] = None - """[Output-Only] Whether OAuth token is set.""" - url: Optional[str] = None """[Input-Only] URL for Slack destination.""" @@ -4705,14 +4626,6 @@ class SlackConfig: def as_dict(self) -> dict: """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.channel_id is not None: - body["channel_id"] = self.channel_id - if self.channel_id_set is not None: - body["channel_id_set"] = self.channel_id_set - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token - if self.oauth_token_set is not None: - body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4722,14 +4635,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the SlackConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel_id is not None: - body["channel_id"] = self.channel_id - if self.channel_id_set is not None: - body["channel_id_set"] = self.channel_id_set - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token - if self.oauth_token_set is not None: - body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4739,14 +4644,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> SlackConfig: """Deserializes the SlackConfig from a dictionary.""" - return cls( - channel_id=d.get("channel_id", None), - channel_id_set=d.get("channel_id_set", None), - oauth_token=d.get("oauth_token", None), - oauth_token_set=d.get("oauth_token_set", None), - url=d.get("url", None), - url_set=d.get("url_set", None), - ) + return cls(url=d.get("url", None), url_set=d.get("url_set", None)) @dataclass diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py index babfb1a09..9f58d1caf 100755 --- a/databricks/sdk/service/settingsv2.py +++ b/databricks/sdk/service/settingsv2.py @@ -4,10 +4,9 @@ import logging from dataclasses import dataclass -from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from ._internal import _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -15,63 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class 
AibiDashboardEmbeddingAccessPolicy: - access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType - - def as_dict(self) -> dict: - """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_policy_type is not None: - body["access_policy_type"] = self.access_policy_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_policy_type is not None: - body["access_policy_type"] = self.access_policy_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicy: - """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary.""" - return cls( - access_policy_type=_enum(d, "access_policy_type", AibiDashboardEmbeddingAccessPolicyAccessPolicyType) - ) - - -class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): - - ALLOW_ALL_DOMAINS = "ALLOW_ALL_DOMAINS" - ALLOW_APPROVED_DOMAINS = "ALLOW_APPROVED_DOMAINS" - DENY_ALL_DOMAINS = "DENY_ALL_DOMAINS" - - -@dataclass -class AibiDashboardEmbeddingApprovedDomains: - approved_domains: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.approved_domains: - body["approved_domains"] = [v for v in self.approved_domains] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes.""" - body = {} - if self.approved_domains: - body["approved_domains"] = self.approved_domains - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomains: - """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary.""" - return cls(approved_domains=d.get("approved_domains", None)) - - @dataclass class BooleanMessage: value: Optional[bool] = None @@ -96,232 +38,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BooleanMessage: return cls(value=d.get("value", None)) -@dataclass -class ClusterAutoRestartMessage: - can_toggle: Optional[bool] = None - - enabled: Optional[bool] = None - - enablement_details: Optional[ClusterAutoRestartMessageEnablementDetails] = None - - maintenance_window: Optional[ClusterAutoRestartMessageMaintenanceWindow] = None - - restart_even_if_no_updates_available: Optional[bool] = None - - def as_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.can_toggle is not None: - body["can_toggle"] = self.can_toggle - if self.enabled is not None: - body["enabled"] = self.enabled - if self.enablement_details: - body["enablement_details"] = self.enablement_details.as_dict() - if self.maintenance_window: - body["maintenance_window"] = self.maintenance_window.as_dict() - if self.restart_even_if_no_updates_available is not None: - body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes.""" - body = {} - if self.can_toggle is not None: - body["can_toggle"] = self.can_toggle - if self.enabled is not None: - body["enabled"] = self.enabled - if self.enablement_details: - 
body["enablement_details"] = self.enablement_details - if self.maintenance_window: - body["maintenance_window"] = self.maintenance_window - if self.restart_even_if_no_updates_available is not None: - body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessage: - """Deserializes the ClusterAutoRestartMessage from a dictionary.""" - return cls( - can_toggle=d.get("can_toggle", None), - enabled=d.get("enabled", None), - enablement_details=_from_dict(d, "enablement_details", ClusterAutoRestartMessageEnablementDetails), - maintenance_window=_from_dict(d, "maintenance_window", ClusterAutoRestartMessageMaintenanceWindow), - restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None), - ) - - -@dataclass -class ClusterAutoRestartMessageEnablementDetails: - """Contains an information about the enablement status judging (e.g. whether the enterprise tier is - enabled) This is only additional information that MUST NOT be used to decide whether the setting - is enabled or not. This is intended to use only for purposes like showing an error message to - the customer with the additional details. For example, using these details we can check why - exactly the feature is disabled for this customer.""" - - forced_for_compliance_mode: Optional[bool] = None - """The feature is force enabled if compliance mode is active""" - - unavailable_for_disabled_entitlement: Optional[bool] = None - """The feature is unavailable if the corresponding entitlement disabled (see - getShieldEntitlementEnable)""" - - unavailable_for_non_enterprise_tier: Optional[bool] = None - """The feature is unavailable if the customer doesn't have enterprise tier""" - - def as_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageEnablementDetails into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.forced_for_compliance_mode is not None: - body["forced_for_compliance_mode"] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: - body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: - body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes.""" - body = {} - if self.forced_for_compliance_mode is not None: - body["forced_for_compliance_mode"] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: - body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: - body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageEnablementDetails: - """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary.""" - return cls( - forced_for_compliance_mode=d.get("forced_for_compliance_mode", None), - unavailable_for_disabled_entitlement=d.get("unavailable_for_disabled_entitlement", None), - unavailable_for_non_enterprise_tier=d.get("unavailable_for_non_enterprise_tier", None), - ) - - -@dataclass -class ClusterAutoRestartMessageMaintenanceWindow: - 
week_day_based_schedule: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule] = None - - def as_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.week_day_based_schedule: - body["week_day_based_schedule"] = self.week_day_based_schedule.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes.""" - body = {} - if self.week_day_based_schedule: - body["week_day_based_schedule"] = self.week_day_based_schedule - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindow: - """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary.""" - return cls( - week_day_based_schedule=_from_dict( - d, "week_day_based_schedule", ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule - ) - ) - - -class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): - - FRIDAY = "FRIDAY" - MONDAY = "MONDAY" - SATURDAY = "SATURDAY" - SUNDAY = "SUNDAY" - THURSDAY = "THURSDAY" - TUESDAY = "TUESDAY" - WEDNESDAY = "WEDNESDAY" - - -@dataclass -class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: - day_of_week: Optional[ClusterAutoRestartMessageMaintenanceWindowDayOfWeek] = None - - frequency: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency] = None - - window_start_time: Optional[ClusterAutoRestartMessageMaintenanceWindowWindowStartTime] = None - - def as_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.day_of_week is not None: - body["day_of_week"] = self.day_of_week.value - if self.frequency is not None: - body["frequency"] = self.frequency.value - if self.window_start_time: - body["window_start_time"] = self.window_start_time.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes.""" - body = {} - if self.day_of_week is not None: - body["day_of_week"] = self.day_of_week - if self.frequency is not None: - body["frequency"] = self.frequency - if self.window_start_time: - body["window_start_time"] = self.window_start_time - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: - """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary.""" - return cls( - day_of_week=_enum(d, "day_of_week", ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), - frequency=_enum(d, "frequency", ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), - window_start_time=_from_dict( - d, "window_start_time", ClusterAutoRestartMessageMaintenanceWindowWindowStartTime - ), - ) - - -class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): - - EVERY_WEEK = "EVERY_WEEK" - FIRST_AND_THIRD_OF_MONTH = "FIRST_AND_THIRD_OF_MONTH" - FIRST_OF_MONTH = "FIRST_OF_MONTH" - FOURTH_OF_MONTH = "FOURTH_OF_MONTH" - SECOND_AND_FOURTH_OF_MONTH = "SECOND_AND_FOURTH_OF_MONTH" - SECOND_OF_MONTH = "SECOND_OF_MONTH" - THIRD_OF_MONTH = "THIRD_OF_MONTH" - - -@dataclass -class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: - hours: Optional[int] = None - - 
minutes: Optional[int] = None - - def as_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.hours is not None: - body["hours"] = self.hours - if self.minutes is not None: - body["minutes"] = self.minutes - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes.""" - body = {} - if self.hours is not None: - body["hours"] = self.hours - if self.minutes is not None: - body["minutes"] = self.minutes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: - """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary.""" - return cls(hours=d.get("hours", None), minutes=d.get("minutes", None)) - - @dataclass class IntegerMessage: value: Optional[int] = None @@ -418,95 +134,14 @@ def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceSettingsMetadataResponse: ) -@dataclass -class PersonalComputeMessage: - value: Optional[PersonalComputeMessagePersonalComputeMessageEnum] = None - - def as_dict(self) -> dict: - """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.value is not None: - body["value"] = self.value.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes.""" - body = {} - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeMessage: - """Deserializes the PersonalComputeMessage from a dictionary.""" - return cls(value=_enum(d, "value", PersonalComputeMessagePersonalComputeMessageEnum)) - - -class PersonalComputeMessagePersonalComputeMessageEnum(Enum): - """ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing - all users to create single-machine compute resources. 
DELEGATE: Moves access control for the - Personal Compute default policy to individual workspaces and requires a workspace’s users or - groups to be added to the ACLs of that workspace’s Personal Compute default policy before they - will be able to create compute resources through that policy.""" - - DELEGATE = "DELEGATE" - ON = "ON" - - -@dataclass -class RestrictWorkspaceAdminsMessage: - status: RestrictWorkspaceAdminsMessageStatus - - def as_dict(self) -> dict: - """Serializes the RestrictWorkspaceAdminsMessage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes.""" - body = {} - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsMessage: - """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary.""" - return cls(status=_enum(d, "status", RestrictWorkspaceAdminsMessageStatus)) - - -class RestrictWorkspaceAdminsMessageStatus(Enum): - - ALLOW_ALL = "ALLOW_ALL" - RESTRICT_TOKENS_AND_JOB_RUN_AS = "RESTRICT_TOKENS_AND_JOB_RUN_AS" - - @dataclass class Setting: - aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None - - aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None - - automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None - boolean_val: Optional[BooleanMessage] = None - effective_aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None - - effective_aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None - - effective_automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None - effective_boolean_val: Optional[BooleanMessage] = None effective_integer_val: Optional[IntegerMessage] = None - effective_personal_compute: Optional[PersonalComputeMessage] = None - - effective_restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None - effective_string_val: Optional[StringMessage] = None integer_val: Optional[IntegerMessage] = None @@ -514,53 +149,23 @@ class Setting: name: Optional[str] = None """Name of the setting.""" - personal_compute: Optional[PersonalComputeMessage] = None - - restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None - string_val: Optional[StringMessage] = None def as_dict(self) -> dict: """Serializes the Setting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aibi_dashboard_embedding_access_policy: - body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy.as_dict() - if self.aibi_dashboard_embedding_approved_domains: - body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains.as_dict() - if self.automatic_cluster_update_workspace: - body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace.as_dict() if self.boolean_val: body["boolean_val"] = self.boolean_val.as_dict() - if self.effective_aibi_dashboard_embedding_access_policy: - body["effective_aibi_dashboard_embedding_access_policy"] = ( - self.effective_aibi_dashboard_embedding_access_policy.as_dict() - ) - if self.effective_aibi_dashboard_embedding_approved_domains: - 
body["effective_aibi_dashboard_embedding_approved_domains"] = ( - self.effective_aibi_dashboard_embedding_approved_domains.as_dict() - ) - if self.effective_automatic_cluster_update_workspace: - body["effective_automatic_cluster_update_workspace"] = ( - self.effective_automatic_cluster_update_workspace.as_dict() - ) if self.effective_boolean_val: body["effective_boolean_val"] = self.effective_boolean_val.as_dict() if self.effective_integer_val: body["effective_integer_val"] = self.effective_integer_val.as_dict() - if self.effective_personal_compute: - body["effective_personal_compute"] = self.effective_personal_compute.as_dict() - if self.effective_restrict_workspace_admins: - body["effective_restrict_workspace_admins"] = self.effective_restrict_workspace_admins.as_dict() if self.effective_string_val: body["effective_string_val"] = self.effective_string_val.as_dict() if self.integer_val: body["integer_val"] = self.integer_val.as_dict() if self.name is not None: body["name"] = self.name - if self.personal_compute: - body["personal_compute"] = self.personal_compute.as_dict() - if self.restrict_workspace_admins: - body["restrict_workspace_admins"] = self.restrict_workspace_admins.as_dict() if self.string_val: body["string_val"] = self.string_val.as_dict() return body @@ -568,42 +173,18 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the Setting into a shallow dictionary of its immediate attributes.""" body = {} - if self.aibi_dashboard_embedding_access_policy: - body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy - if self.aibi_dashboard_embedding_approved_domains: - body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains - if self.automatic_cluster_update_workspace: - body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace if self.boolean_val: body["boolean_val"] = self.boolean_val - if self.effective_aibi_dashboard_embedding_access_policy: - body["effective_aibi_dashboard_embedding_access_policy"] = ( - self.effective_aibi_dashboard_embedding_access_policy - ) - if self.effective_aibi_dashboard_embedding_approved_domains: - body["effective_aibi_dashboard_embedding_approved_domains"] = ( - self.effective_aibi_dashboard_embedding_approved_domains - ) - if self.effective_automatic_cluster_update_workspace: - body["effective_automatic_cluster_update_workspace"] = self.effective_automatic_cluster_update_workspace if self.effective_boolean_val: body["effective_boolean_val"] = self.effective_boolean_val if self.effective_integer_val: body["effective_integer_val"] = self.effective_integer_val - if self.effective_personal_compute: - body["effective_personal_compute"] = self.effective_personal_compute - if self.effective_restrict_workspace_admins: - body["effective_restrict_workspace_admins"] = self.effective_restrict_workspace_admins if self.effective_string_val: body["effective_string_val"] = self.effective_string_val if self.integer_val: body["integer_val"] = self.integer_val if self.name is not None: body["name"] = self.name - if self.personal_compute: - body["personal_compute"] = self.personal_compute - if self.restrict_workspace_admins: - body["restrict_workspace_admins"] = self.restrict_workspace_admins if self.string_val: body["string_val"] = self.string_val return body @@ -612,36 +193,12 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Setting: """Deserializes the Setting from a dictionary.""" return cls( - 
aibi_dashboard_embedding_access_policy=_from_dict(
-                d, "aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy
-            ),
-            aibi_dashboard_embedding_approved_domains=_from_dict(
-                d, "aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains
-            ),
-            automatic_cluster_update_workspace=_from_dict(
-                d, "automatic_cluster_update_workspace", ClusterAutoRestartMessage
-            ),
             boolean_val=_from_dict(d, "boolean_val", BooleanMessage),
-            effective_aibi_dashboard_embedding_access_policy=_from_dict(
-                d, "effective_aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy
-            ),
-            effective_aibi_dashboard_embedding_approved_domains=_from_dict(
-                d, "effective_aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains
-            ),
-            effective_automatic_cluster_update_workspace=_from_dict(
-                d, "effective_automatic_cluster_update_workspace", ClusterAutoRestartMessage
-            ),
             effective_boolean_val=_from_dict(d, "effective_boolean_val", BooleanMessage),
             effective_integer_val=_from_dict(d, "effective_integer_val", IntegerMessage),
-            effective_personal_compute=_from_dict(d, "effective_personal_compute", PersonalComputeMessage),
-            effective_restrict_workspace_admins=_from_dict(
-                d, "effective_restrict_workspace_admins", RestrictWorkspaceAdminsMessage
-            ),
             effective_string_val=_from_dict(d, "effective_string_val", StringMessage),
             integer_val=_from_dict(d, "integer_val", IntegerMessage),
             name=d.get("name", None),
-            personal_compute=_from_dict(d, "personal_compute", PersonalComputeMessage),
-            restrict_workspace_admins=_from_dict(d, "restrict_workspace_admins", RestrictWorkspaceAdminsMessage),
             string_val=_from_dict(d, "string_val", StringMessage),
         )
 
 
@@ -729,8 +286,7 @@ def __init__(self, api_client):
         self._api = api_client
 
     def get_public_account_setting(self, name: str) -> Setting:
-        """Get a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for list of
-        setting available via public APIs at account level.
+        """Get a setting value at account level.
 
         :param name: str
 
@@ -747,8 +303,9 @@ def get_public_account_setting(self, name: str) -> Setting:
     def list_account_settings_metadata(
         self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[SettingsMetadata]:
-        """List valid setting keys and metadata. These settings are available to be referenced via GET
-        :method:settingsv2/getpublicaccountsetting and PATCH :method:settingsv2/patchpublicaccountsetting APIs
+        """List valid setting keys and metadata. These settings are available to be referenced via [GET
+        /api/2.1/settings/{name}](#~1api~1account~1settingsv2~1getpublicaccountsetting) and [PATCH
+        /api/2.1/settings/{name}](#~1api~1account~1settingsv2~1patchpublicaccountsetting) APIs.
 
         :param page_size: int (optional)
           The maximum number of settings to return. The service may return fewer than this value. If
@@ -785,8 +342,7 @@ def list_account_settings_metadata(
             query["page_token"] = json["next_page_token"]
 
     def patch_public_account_setting(self, name: str, setting: Setting) -> Setting:
-        """Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for list of
-        setting available via public APIs at account level.
+        """Patch a setting value at account level.
 
         :param name: str
         :param setting: :class:`Setting`
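A minimal usage sketch of the simplified account-level settings surface above; the `settings_v2` accessor name on `AccountClient` and the setting name `example_setting` are illustrative assumptions, not taken from this diff:

from databricks.sdk import AccountClient
from databricks.sdk.service.settingsv2 import BooleanMessage, Setting

a = AccountClient()
# Discover which setting names can be read and patched at the account level.
for meta in a.settings_v2.list_account_settings_metadata():
    print(meta)
# Read one setting, then patch it back with a boolean value.
current = a.settings_v2.get_public_account_setting(name="example_setting")
patched = a.settings_v2.patch_public_account_setting(
    name="example_setting",
    setting=Setting(name="example_setting", boolean_val=BooleanMessage(value=True)),
)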
@@ -812,8 +368,7 @@ def __init__(self, api_client):
         self._api = api_client
 
     def get_public_workspace_setting(self, name: str) -> Setting:
-        """Get a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for list
-        of setting available via public APIs.
+        """Get a setting value at workspace level.
 
         :param name: str
 
@@ -830,9 +385,9 @@ def get_public_workspace_setting(self, name: str) -> Setting:
     def list_workspace_settings_metadata(
         self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[SettingsMetadata]:
-        """List valid setting keys and metadata. These settings are available to be referenced via GET
-        :method:settingsv2/getpublicworkspacesetting and PATCH :method:settingsv2/patchpublicworkspacesetting
-        APIs
+        """List valid setting keys and metadata. These settings are available to be referenced via [GET
+        /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~1getpublicworkspacesetting) and [PATCH
+        /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~1patchpublicworkspacesetting) APIs.
 
         :param page_size: int (optional)
           The maximum number of settings to return. The service may return fewer than this value. If
@@ -867,8 +422,7 @@ def list_workspace_settings_metadata(
             query["page_token"] = json["next_page_token"]
 
     def patch_public_workspace_setting(self, name: str, setting: Setting) -> Setting:
-        """Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for
-        list of setting available via public APIs at workspace level.
+        """Patch a setting value at workspace level.
 
         :param name: str
         :param setting: :class:`Setting`
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index ad791cc15..065da110d 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -1142,6 +1142,15 @@ class PermissionsChange:
     """The principal whose privileges we are changing. Only one of principal or principal_id should be
     specified, never both at the same time."""
 
+    principal_id: Optional[int] = None
+    """An opaque internal ID that identifies the principal whose privileges should be removed.
+
+    This field is intended for removing privileges associated with a deleted user. When set, only
+    the entries specified in the remove field are processed; any entries in the add field will be
+    rejected.
+
+    Only one of principal or principal_id should be specified, never both at the same time."""
+
     remove: Optional[List[str]] = None
     """The set of privileges to remove."""
 
@@ -1152,6 +1161,8 @@ def as_dict(self) -> dict:
             body["add"] = [v for v in self.add]
         if self.principal is not None:
             body["principal"] = self.principal
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
         if self.remove:
             body["remove"] = [v for v in self.remove]
         return body
@@ -1163,6 +1174,8 @@ def as_shallow_dict(self) -> dict:
             body["add"] = self.add
         if self.principal is not None:
             body["principal"] = self.principal
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
         if self.remove:
             body["remove"] = self.remove
         return body
@@ -1170,7 +1183,12 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
-        return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None))
+        return cls(
+            add=d.get("add", None),
+            principal=d.get("principal", None),
+            principal_id=d.get("principal_id", None),
+            remove=d.get("remove", None),
+        )
 
 
 class Privilege(Enum):
@@ -1228,6 +1246,10 @@ class PrivilegeAssignment:
     """The principal (user email address or group name).
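A hedged sketch of the `principal_id` semantics documented above: it targets a now-deleted principal, and only `remove` entries may accompany it; the numeric ID is a placeholder:

from databricks.sdk.service.sharing import PermissionsChange

# Remove a privilege previously granted to a deleted user, addressed by internal ID.
change = PermissionsChange(principal_id=1234567890, remove=["SELECT"])
# Only principal_id and remove are serialized; any add entries would be rejected by the service.
assert change.as_dict() == {"principal_id": 1234567890, "remove": ["SELECT"]}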
For deleted principals, `principal` is empty while `principal_id` is populated.""" + principal_id: Optional[int] = None + """Unique identifier of the principal. For active principals, both `principal` and `principal_id` + are present.""" + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -1236,6 +1258,8 @@ def as_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = [v.value for v in self.privileges] return body @@ -1245,6 +1269,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.principal is not None: body["principal"] = self.principal + if self.principal_id is not None: + body["principal_id"] = self.principal_id if self.privileges: body["privileges"] = self.privileges return body @@ -1252,7 +1278,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls( + principal=d.get("principal", None), + principal_id=d.get("principal_id", None), + privileges=_repeated_enum(d, "privileges", Privilege), + ) @dataclass @@ -1827,32 +1857,59 @@ def from_dict(cls, d: Dict[str, Any]) -> SecurablePropertiesKvPairs: @dataclass class Share: + comment: Optional[str] = None + """The comment of the share.""" + + display_name: Optional[str] = None + """The display name of the share. If defined, it will be shown in the UI.""" + id: Optional[str] = None name: Optional[str] = None + tags: Optional[List[catalog.TagKeyValue]] = None + """The tags of the share.""" + def as_dict(self) -> dict: """Serializes the Share into a dictionary suitable for use as a JSON request body.""" body = {} + if self.comment is not None: + body["comment"] = self.comment + if self.display_name is not None: + body["display_name"] = self.display_name if self.id is not None: body["id"] = self.id if self.name is not None: body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Share into a shallow dictionary of its immediate attributes.""" body = {} + if self.comment is not None: + body["comment"] = self.comment + if self.display_name is not None: + body["display_name"] = self.display_name if self.id is not None: body["id"] = self.id if self.name is not None: body["name"] = self.name + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Share: """Deserializes the Share from a dictionary.""" - return cls(id=d.get("id", None), name=d.get("name", None)) + return cls( + comment=d.get("comment", None), + display_name=d.get("display_name", None), + id=d.get("id", None), + name=d.get("name", None), + tags=_repeated_dict(d, "tags", catalog.TagKeyValue), + ) @dataclass @@ -1875,6 +1932,10 @@ class ShareInfo: owner: Optional[str] = None """Username of current owner of share.""" + serverless_budget_policy_id: Optional[str] = None + """Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN]""" + storage_location: Optional[str] = None """Storage Location URL (full path) for the share.""" @@ -1902,6 +1963,8 @@ def as_dict(self) -> dict: body["objects"] = [v.as_dict() for v in self.objects] if self.owner 
is not None: body["owner"] = self.owner + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1927,6 +1990,8 @@ def as_shallow_dict(self) -> dict: body["objects"] = self.objects if self.owner is not None: body["owner"] = self.owner + if self.serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = self.serverless_budget_policy_id if self.storage_location is not None: body["storage_location"] = self.storage_location if self.storage_root is not None: @@ -1947,6 +2012,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ShareInfo: name=d.get("name", None), objects=_repeated_dict(d, "objects", SharedDataObject), owner=d.get("owner", None), + serverless_budget_policy_id=d.get("serverless_budget_policy_id", None), storage_location=d.get("storage_location", None), storage_root=d.get("storage_root", None), updated_at=d.get("updated_at", None), @@ -2307,9 +2373,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Table: class TableInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" - auxiliary_managed_location: Optional[str] = None - """Managed Delta Metadata location for foreign iceberg tables.""" - parent_storage_location: Optional[str] = None """Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of the parent UC entity's storage_location, following the same logic as getManagedEntityPath in @@ -2330,8 +2393,6 @@ class TableInternalAttributes: def as_dict(self) -> dict: """Serializes the TableInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auxiliary_managed_location is not None: - body["auxiliary_managed_location"] = self.auxiliary_managed_location if self.parent_storage_location is not None: body["parent_storage_location"] = self.parent_storage_location if self.storage_location is not None: @@ -2345,8 +2406,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TableInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.auxiliary_managed_location is not None: - body["auxiliary_managed_location"] = self.auxiliary_managed_location if self.parent_storage_location is not None: body["parent_storage_location"] = self.parent_storage_location if self.storage_location is not None: @@ -2361,7 +2420,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TableInternalAttributes: """Deserializes the TableInternalAttributes from a dictionary.""" return cls( - auxiliary_managed_location=d.get("auxiliary_managed_location", None), parent_storage_location=d.get("parent_storage_location", None), storage_location=d.get("storage_location", None), type=_enum(d, "type", TableInternalAttributesSharedTableType), @@ -2374,10 +2432,8 @@ class TableInternalAttributesSharedTableType(Enum): DELTA_ICEBERG_TABLE = "DELTA_ICEBERG_TABLE" DIRECTORY_BASED_TABLE = "DIRECTORY_BASED_TABLE" FILE_BASED_TABLE = "FILE_BASED_TABLE" - FOREIGN_ICEBERG_TABLE = "FOREIGN_ICEBERG_TABLE" FOREIGN_TABLE = "FOREIGN_TABLE" MATERIALIZED_VIEW = "MATERIALIZED_VIEW" - METRIC_VIEW = "METRIC_VIEW" STREAMING_TABLE = "STREAMING_TABLE" VIEW = "VIEW" @@ -3310,7 +3366,14 @@ class SharesAPI: def __init__(self, api_client): self._api = api_client - def create(self, name: str, *, comment: Optional[str] = None, 
storage_root: Optional[str] = None) -> ShareInfo: + def create( + self, + name: str, + *, + comment: Optional[str] = None, + serverless_budget_policy_id: Optional[str] = None, + storage_root: Optional[str] = None, + ) -> ShareInfo: """Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. @@ -3318,6 +3381,9 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti Name of the share. :param comment: str (optional) User-provided free-form text description. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. @@ -3328,6 +3394,8 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti body["comment"] = comment if name is not None: body["name"] = name + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root headers = { @@ -3373,9 +3441,7 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}", query=query, headers=headers) return ShareInfo.from_dict(res) - def list_shares( - self, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ShareInfo]: + def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: """Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. @@ -3454,6 +3520,7 @@ def update( comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + serverless_budget_policy_id: Optional[str] = None, storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None, ) -> ShareInfo: @@ -3481,6 +3548,9 @@ def update( New name for the share. :param owner: str (optional) Username of current owner of share. + :param serverless_budget_policy_id: str (optional) + Serverless budget policy id (can only be created/updated when calling data-sharing service) + [Create,Update:IGN] :param storage_root: str (optional) Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) @@ -3495,6 +3565,8 @@ def update( body["new_name"] = new_name if owner is not None: body["owner"] = owner + if serverless_budget_policy_id is not None: + body["serverless_budget_policy_id"] = serverless_budget_policy_id if storage_root is not None: body["storage_root"] = storage_root if updates is not None: diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 33b80c3c9..b35039c98 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -646,10 +646,6 @@ class AlertV2: display_name: Optional[str] = None """The display name of the alert.""" - effective_run_as: Optional[AlertV2RunAs] = None - """The actual identity that will be used to execute the alert. 
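A short sketch of the SharesAPI changes above: `create` and `update` now accept `serverless_budget_policy_id`, and `list_shares` is renamed to `list`. The share name and policy ID below are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
share = w.shares.create(
    name="example_share",
    comment="created via the SDK",
    serverless_budget_policy_id="budget-policy-id",  # placeholder value
)
for s in w.shares.list():
    print(s.name, s.serverless_budget_policy_id)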
This is an output-only field that - shows the resolved run-as identity after applying permissions and defaults.""" - evaluation: Optional[AlertV2Evaluation] = None id: Optional[str] = None @@ -668,18 +664,10 @@ class AlertV2: query_text: Optional[str] = None """Text of the query to be run.""" - run_as: Optional[AlertV2RunAs] = None - """Specifies the identity that will be used to run the alert. This field allows you to configure - alerts to run as a specific user or service principal. - For user identity: Set `user_name` to - the email of an active workspace user. Users can only set this to their own email. - For service - principal: Set `service_principal_name` to the application ID. Requires the - `servicePrincipal/user` role. If not specified, the alert will run as the request user.""" - run_as_user_name: Optional[str] = None """The run as username or application ID of service principal. On Create and Update, this field can be set to application ID of an active service principal. Setting this field requires the - servicePrincipal/user role. Deprecated: Use `run_as` field instead. This field will be removed - in a future release.""" + servicePrincipal/user role.""" schedule: Optional[CronSchedule] = None @@ -700,8 +688,6 @@ def as_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name - if self.effective_run_as: - body["effective_run_as"] = self.effective_run_as.as_dict() if self.evaluation: body["evaluation"] = self.evaluation.as_dict() if self.id is not None: @@ -714,8 +700,6 @@ def as_dict(self) -> dict: body["parent_path"] = self.parent_path if self.query_text is not None: body["query_text"] = self.query_text - if self.run_as: - body["run_as"] = self.run_as.as_dict() if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.schedule: @@ -737,8 +721,6 @@ def as_shallow_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name - if self.effective_run_as: - body["effective_run_as"] = self.effective_run_as if self.evaluation: body["evaluation"] = self.evaluation if self.id is not None: @@ -751,8 +733,6 @@ def as_shallow_dict(self) -> dict: body["parent_path"] = self.parent_path if self.query_text is not None: body["query_text"] = self.query_text - if self.run_as: - body["run_as"] = self.run_as if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.schedule: @@ -771,14 +751,12 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2: custom_description=d.get("custom_description", None), custom_summary=d.get("custom_summary", None), display_name=d.get("display_name", None), - effective_run_as=_from_dict(d, "effective_run_as", AlertV2RunAs), evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), id=d.get("id", None), lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), owner_user_name=d.get("owner_user_name", None), parent_path=d.get("parent_path", None), query_text=d.get("query_text", None), - run_as=_from_dict(d, "run_as", AlertV2RunAs), run_as_user_name=d.get("run_as_user_name", None), schedule=_from_dict(d, "schedule", CronSchedule), update_time=d.get("update_time", None), @@ -792,8 +770,7 @@ class AlertV2Evaluation: """Operator used for comparison in alert evaluation.""" empty_result_state: Optional[AlertEvaluationState] = None - """Alert state if result is empty. 
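With the structured `run_as` and `effective_run_as` fields removed above, the run-as identity on AlertV2 is configured through `run_as_user_name` alone in this version; a minimal sketch with placeholder values:

from databricks.sdk.service.sql import AlertV2

alert = AlertV2(
    display_name="nightly row-count check",
    query_text="SELECT count(*) FROM samples.nyctaxi.trips",
    run_as_user_name="someone@example.com",  # or an active service principal's application ID
)
print(alert.as_dict())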
Please avoid setting this field to be `UNKNOWN` because - `UNKNOWN` state is planned to be deprecated.""" + """Alert state if result is empty.""" last_evaluated_at: Optional[str] = None """Timestamp of the last evaluation.""" @@ -1015,39 +992,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandValue: ) -@dataclass -class AlertV2RunAs: - service_principal_name: Optional[str] = None - """Application ID of an active service principal. Setting this field requires the - `servicePrincipal/user` role.""" - - user_name: Optional[str] = None - """The email of an active workspace user. Can only set this field to their own email.""" - - def as_dict(self) -> dict: - """Serializes the AlertV2RunAs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AlertV2RunAs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AlertV2RunAs: - """Deserializes the AlertV2RunAs from a dictionary.""" - return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None)) - - @dataclass class AlertV2Subscription: destination_id: Optional[str] = None @@ -3915,18 +3859,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponseAlert: @dataclass class ListAlertsV2Response: - alerts: Optional[List[AlertV2]] = None - next_page_token: Optional[str] = None results: Optional[List[AlertV2]] = None - """Deprecated. 
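Since the `alerts` field is dropped here, deserialization of a list-alerts page keys only on `results`; a small round-trip sketch under that assumption, with placeholder values:

from databricks.sdk.service.sql import ListAlertsV2Response

page = ListAlertsV2Response.from_dict(
    {"results": [{"id": "alert-123", "display_name": "nightly check"}], "next_page_token": "tok"}
)
print([a.id for a in page.results], page.next_page_token)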
Use `alerts` instead.""" def as_dict(self) -> dict: """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: - body["alerts"] = [v.as_dict() for v in self.alerts] if self.next_page_token is not None: body["next_page_token"] = self.next_page_token if self.results: @@ -3936,8 +3875,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the ListAlertsV2Response into a shallow dictionary of its immediate attributes.""" body = {} - if self.alerts: - body["alerts"] = self.alerts if self.next_page_token is not None: body["next_page_token"] = self.next_page_token if self.results: @@ -3947,11 +3884,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response: """Deserializes the ListAlertsV2Response from a dictionary.""" - return cls( - alerts=_repeated_dict(d, "alerts", AlertV2), - next_page_token=d.get("next_page_token", None), - results=_repeated_dict(d, "results", AlertV2), - ) + return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", AlertV2)) class ListOrder(Enum): @@ -4790,9 +4723,6 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryFilter: @dataclass class QueryInfo: - cache_query_id: Optional[str] = None - """The ID of the cached query if this result retrieved from cache""" - channel_used: Optional[ChannelInfo] = None """SQL Warehouse channel information at the time of query execution""" @@ -4878,8 +4808,6 @@ class QueryInfo: def as_dict(self) -> dict: """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cache_query_id is not None: - body["cache_query_id"] = self.cache_query_id if self.channel_used: body["channel_used"] = self.channel_used.as_dict() if self.client_application is not None: @@ -4933,8 +4861,6 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the QueryInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cache_query_id is not None: - body["cache_query_id"] = self.cache_query_id if self.channel_used: body["channel_used"] = self.channel_used if self.client_application is not None: @@ -4989,7 +4915,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> QueryInfo: """Deserializes the QueryInfo from a dictionary.""" return cls( - cache_query_id=d.get("cache_query_id", None), channel_used=_from_dict(d, "channel_used", ChannelInfo), client_application=d.get("client_application", None), duration=d.get("duration", None), @@ -9546,7 +9471,7 @@ def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: return GetWorkspaceWarehouseConfigResponse.from_dict(res) def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: - """Lists all SQL warehouses that a user has access to. + """Lists all SQL warehouses that a user has manager permissions on. :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. 
If not specified, the user diff --git a/databricks/sdk/service/tags.py b/databricks/sdk/service/tags.py index 9fa90681a..0d851907e 100755 --- a/databricks/sdk/service/tags.py +++ b/databricks/sdk/service/tags.py @@ -14,6 +14,39 @@ # all definitions in this file are in alphabetical order +@dataclass +class ListTagAssignmentsResponse: + next_page_token: Optional[str] = None + + tag_assignments: Optional[List[TagAssignment]] = None + + def as_dict(self) -> dict: + """Serializes the ListTagAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tag_assignments: + body["tag_assignments"] = [v.as_dict() for v in self.tag_assignments] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListTagAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tag_assignments: + body["tag_assignments"] = self.tag_assignments + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListTagAssignmentsResponse: + """Deserializes the ListTagAssignmentsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + tag_assignments=_repeated_dict(d, "tag_assignments", TagAssignment), + ) + + @dataclass class ListTagPoliciesResponse: next_page_token: Optional[str] = None @@ -47,34 +80,71 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTagPoliciesResponse: @dataclass -class TagPolicy: +class TagAssignment: + entity_type: str + + entity_id: str + tag_key: str - create_time: Optional[str] = None - """Timestamp when the tag policy was created""" + tag_value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the TagAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_id is not None: + body["entity_id"] = self.entity_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.tag_key is not None: + body["tag_key"] = self.tag_key + if self.tag_value is not None: + body["tag_value"] = self.tag_value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TagAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_id is not None: + body["entity_id"] = self.entity_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.tag_key is not None: + body["tag_key"] = self.tag_key + if self.tag_value is not None: + body["tag_value"] = self.tag_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TagAssignment: + """Deserializes the TagAssignment from a dictionary.""" + return cls( + entity_id=d.get("entity_id", None), + entity_type=d.get("entity_type", None), + tag_key=d.get("tag_key", None), + tag_value=d.get("tag_value", None), + ) + + +@dataclass +class TagPolicy: + tag_key: str description: Optional[str] = None id: Optional[str] = None - update_time: Optional[str] = None - """Timestamp when the tag policy was last updated""" - values: Optional[List[Value]] = None def as_dict(self) -> dict: """Serializes the TagPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time if self.description is not None: body["description"] = self.description if self.id is not None: body["id"] = self.id if 
self.tag_key is not None: body["tag_key"] = self.tag_key - if self.update_time is not None: - body["update_time"] = self.update_time if self.values: body["values"] = [v.as_dict() for v in self.values] return body @@ -82,16 +152,12 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TagPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time if self.description is not None: body["description"] = self.description if self.id is not None: body["id"] = self.id if self.tag_key is not None: body["tag_key"] = self.tag_key - if self.update_time is not None: - body["update_time"] = self.update_time if self.values: body["values"] = self.values return body @@ -100,11 +166,9 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TagPolicy: """Deserializes the TagPolicy from a dictionary.""" return cls( - create_time=d.get("create_time", None), description=d.get("description", None), id=d.get("id", None), tag_key=d.get("tag_key", None), - update_time=d.get("update_time", None), values=_repeated_dict(d, "values", Value), ) @@ -133,17 +197,147 @@ def from_dict(cls, d: Dict[str, Any]) -> Value: return cls(name=d.get("name", None)) -class TagPoliciesAPI: - """The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions for tag - policies can be managed using the [Account Access Control Proxy API]. +class TagAssignmentsAPI: + """Manage tag assignments on workspace-scoped objects.""" + + def __init__(self, api_client): + self._api = api_client + + def create_tag_assignment(self, tag_assignment: TagAssignment) -> TagAssignment: + """Create a tag assignment + + :param tag_assignment: :class:`TagAssignment` + + :returns: :class:`TagAssignment` + """ + body = tag_assignment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/entity-tag-assignments", body=body, headers=headers) + return TagAssignment.from_dict(res) + + def delete_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str): + """Delete a tag assignment - [Account Access Control Proxy API]: https://docs.databricks.com/api/workspace/accountaccesscontrolproxy""" + :param entity_type: str + :param entity_id: str + :param tag_key: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", headers=headers + ) + + def get_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str) -> TagAssignment: + """Get a tag assignment + + :param entity_type: str + :param entity_id: str + :param tag_key: str + + :returns: :class:`TagAssignment` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", headers=headers + ) + return TagAssignment.from_dict(res) + + def list_tag_assignments( + self, entity_type: str, entity_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[TagAssignment]: + """List the tag assignments for an entity + + :param entity_type: str + :param entity_id: str + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`TagAssignment` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + 
query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}", query=query, headers=headers + ) + if "tag_assignments" in json: + for v in json["tag_assignments"]: + yield TagAssignment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_tag_assignment( + self, entity_type: str, entity_id: str, tag_key: str, tag_assignment: TagAssignment, update_mask: str + ) -> TagAssignment: + """Update a tag assignment + + :param entity_type: str + :param entity_id: str + :param tag_key: str + :param tag_assignment: :class:`TagAssignment` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`TagAssignment` + """ + body = tag_assignment.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags/{tag_key}", + query=query, + body=body, + headers=headers, + ) + return TagAssignment.from_dict(res) + + +class TagPoliciesAPI: + """The Tag Policy API allows you to manage tag policies in Databricks.""" def __init__(self, api_client): self._api = api_client def create_tag_policy(self, tag_policy: TagPolicy) -> TagPolicy: - """Creates a new tag policy, making the associated tag key governed. + """Creates a new tag policy. :param tag_policy: :class:`TagPolicy` @@ -159,7 +353,7 @@ def create_tag_policy(self, tag_policy: TagPolicy) -> TagPolicy: return TagPolicy.from_dict(res) def delete_tag_policy(self, tag_key: str): - """Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned. + """Deletes a tag policy by its key. :param tag_key: str @@ -173,7 +367,7 @@ def delete_tag_policy(self, tag_key: str): self._api.do("DELETE", f"/api/2.1/tag-policies/{tag_key}", headers=headers) def get_tag_policy(self, tag_key: str) -> TagPolicy: - """Gets a single tag policy by its associated governed tag's key. + """Gets a single tag policy by its key. :param tag_key: str @@ -190,14 +384,10 @@ def get_tag_policy(self, tag_key: str) -> TagPolicy: def list_tag_policies( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[TagPolicy]: - """Lists the tag policies for all governed tags in the account. + """Lists all tag policies in the account. :param page_size: int (optional) - The maximum number of results to return in this request. Fewer results may be returned than - requested. If unspecified or set to 0, this defaults to 1000. The maximum value is 1000; values - above 1000 will be coerced down to 1000. :param page_token: str (optional) - An optional page token received from a previous list tag policies call. 
:returns: Iterator over :class:`TagPolicy` """ @@ -221,7 +411,7 @@ def list_tag_policies( query["page_token"] = json["next_page_token"] def update_tag_policy(self, tag_key: str, tag_policy: TagPolicy, update_mask: str) -> TagPolicy: - """Updates an existing tag policy for a single governed tag. + """Updates an existing tag policy. :param tag_key: str :param tag_policy: :class:`TagPolicy` diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 8e706ccd6..237b2e088 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -192,6 +192,9 @@ class DeltaSyncVectorIndexSpecRequest: columns from the source table are synced with the index. The primary key column and embedding source column or embedding vector column are always synced.""" + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -216,6 +219,8 @@ def as_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = [v for v in self.columns_to_sync] + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -233,6 +238,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.columns_to_sync: body["columns_to_sync"] = self.columns_to_sync + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -250,6 +257,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" return cls( columns_to_sync=d.get("columns_to_sync", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -260,6 +268,9 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: @dataclass class DeltaSyncVectorIndexSpecResponse: + effective_budget_policy_id: Optional[str] = None + """The budget policy id applied to the vector search index""" + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -285,6 +296,8 @@ class DeltaSyncVectorIndexSpecResponse: def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -302,6 +315,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.effective_budget_policy_id is not 
None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -320,6 +335,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" return cls( + effective_budget_policy_id=d.get("effective_budget_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -377,10 +393,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DirectAccessVectorIndexSpec: @dataclass class EmbeddingSourceColumn: embedding_model_endpoint_name: Optional[str] = None - """Name of the embedding model endpoint, used by default for both ingestion and querying.""" - - model_endpoint_name_for_query: Optional[str] = None - """Name of the embedding model endpoint which, if specified, is used for querying (not ingestion).""" + """Name of the embedding model endpoint""" name: Optional[str] = None """Name of the column""" @@ -390,8 +403,6 @@ def as_dict(self) -> dict: body = {} if self.embedding_model_endpoint_name is not None: body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name - if self.model_endpoint_name_for_query is not None: - body["model_endpoint_name_for_query"] = self.model_endpoint_name_for_query if self.name is not None: body["name"] = self.name return body @@ -401,8 +412,6 @@ def as_shallow_dict(self) -> dict: body = {} if self.embedding_model_endpoint_name is not None: body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name - if self.model_endpoint_name_for_query is not None: - body["model_endpoint_name_for_query"] = self.model_endpoint_name_for_query if self.name is not None: body["name"] = self.name return body @@ -410,11 +419,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingSourceColumn: """Deserializes the EmbeddingSourceColumn from a dictionary.""" - return cls( - embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), - model_endpoint_name_for_query=d.get("model_endpoint_name_for_query", None), - name=d.get("name", None), - ) + return cls(embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), name=d.get("name", None)) @dataclass @@ -1522,8 +1527,7 @@ def update_endpoint_budget_policy( :param endpoint_name: str Name of the vector search endpoint :param budget_policy_id: str - The budget policy id to be applied (hima-sheth) TODO: remove this once we've migrated to usage - policies + The budget policy id to be applied :returns: :class:`PatchEndpointBudgetPolicyResponse` """ diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index cab860d9c..d9a1cafca 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -425,6 +425,12 @@ class ExportFormat(Enum): SOURCE = "SOURCE" +class ExportOutputs(Enum): + + ALL = "ALL" + NONE = "NONE" + + @dataclass class ExportResponse: """The request field `direct_download` determines whether a JSON response or binary contents are @@ -2638,7 +2644,9 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): self._api.do("POST", "/api/2.0/workspace/delete", body=body, 
headers=headers)
 
-    def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse:
+    def export(
+        self, path: str, *, format: Optional[ExportFormat] = None, outputs: Optional[ExportOutputs] = None
+    ) -> ExportResponse:
         """Exports an object or the contents of an entire directory.
 
         If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
@@ -2660,6 +2668,11 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR
           Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported
           to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type.
           Directory exports will include notebooks and workspace files.
+        :param outputs: :class:`ExportOutputs` (optional)
+          This specifies which cell outputs should be included in the export (if the export format allows it).
+          If not specified, the behavior is determined by the format. For the JUPYTER format, the default is to
+          include all outputs. This is a public endpoint, but only ALL and NONE are documented publicly;
+          DATABRICKS is internal only.
 
         :returns: :class:`ExportResponse`
         """
@@ -2667,6 +2680,8 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR
         query = {}
         if format is not None:
             query["format"] = format.value
+        if outputs is not None:
+            query["outputs"] = outputs.value
         if path is not None:
             query["path"] = path
         headers = {

From 22f5922bd2a9fa7f1b832b407376cd475f32a56b Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 29 Sep 2025 09:31:49 +0000
Subject: [PATCH 2/3] update sdk

---
 .codegen/_openapi_sha                  |    2 +-
 .gitattributes                         |    1 +
 databricks/sdk/__init__.py             |  166 +-
 databricks/sdk/service/agentbricks.py  |    6 +-
 databricks/sdk/service/apps.py         |  783 +++++-
 databricks/sdk/service/billing.py      |  238 ++
 databricks/sdk/service/catalog.py      | 2645 ++++++++++++++------
 databricks/sdk/service/cleanrooms.py   |   46 +-
 databricks/sdk/service/compute.py      |   89 +-
 databricks/sdk/service/dashboards.py   |  288 ++-
 databricks/sdk/service/database.py     | 1357 ++++++++++-
 databricks/sdk/service/dataquality.py  | 1185 +++++++++
 databricks/sdk/service/iam.py          | 3049 ++++++++++++++++++++----
 databricks/sdk/service/iamv2.py        |  252 +-
 databricks/sdk/service/jobs.py         |  151 +-
 databricks/sdk/service/ml.py           |  447 +++-
 databricks/sdk/service/oauth2.py       |    6 +-
 databricks/sdk/service/pipelines.py    |  188 ++
 databricks/sdk/service/provisioning.py | 1496 +++++++-----
 databricks/sdk/service/serving.py      |   86 +
 databricks/sdk/service/settings.py     |  108 +-
 databricks/sdk/service/settingsv2.py   |  468 +++-
 databricks/sdk/service/sharing.py      |   43 +-
 databricks/sdk/service/sql.py          |  438 +++-
 databricks/sdk/service/tags.py         |   55 +-
 databricks/sdk/service/vectorsearch.py |   84 +-
 26 files changed, 11387 insertions(+), 2290 deletions(-)
 create mode 100755 databricks/sdk/service/dataquality.py

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2db0598a6..82cc21f8f 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-universe:/home/parth.bansal/universe
\ No newline at end of file
+universe:/home/parth.bansal/vn0/universe
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index e68ce1aca..9d89ea571 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -9,6 +9,7 @@ databricks/sdk/service/cleanrooms.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
 databricks/sdk/service/dashboards.py linguist-generated=true
 databricks/sdk/service/database.py
From 22f5922bd2a9fa7f1b832b407376cd475f32a56b Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 29 Sep 2025 09:31:49 +0000
Subject: [PATCH 2/3] update sdk

---
 .codegen/_openapi_sha                  |    2 +-
 .gitattributes                         |    1 +
 databricks/sdk/__init__.py             |  166 +-
 databricks/sdk/service/agentbricks.py  |    6 +-
 databricks/sdk/service/apps.py         |  783 +++++-
 databricks/sdk/service/billing.py      |  238 ++
 databricks/sdk/service/catalog.py      | 2645 ++++++++++++++------
 databricks/sdk/service/cleanrooms.py   |   46 +-
 databricks/sdk/service/compute.py      |   89 +-
 databricks/sdk/service/dashboards.py   |  288 ++-
 databricks/sdk/service/database.py     | 1357 ++++++++++-
 databricks/sdk/service/dataquality.py  | 1185 +++++++++
 databricks/sdk/service/iam.py          | 3049 ++++++++++++++++++++----
 databricks/sdk/service/iamv2.py        |  252 +-
 databricks/sdk/service/jobs.py         |  151 +-
 databricks/sdk/service/ml.py           |  447 +++-
 databricks/sdk/service/oauth2.py       |    6 +-
 databricks/sdk/service/pipelines.py    |  188 ++
 databricks/sdk/service/provisioning.py | 1496 +++++++-----
 databricks/sdk/service/serving.py      |   86 +
 databricks/sdk/service/settings.py     |  108 +-
 databricks/sdk/service/settingsv2.py   |  468 +++-
 databricks/sdk/service/sharing.py      |   43 +-
 databricks/sdk/service/sql.py          |  438 +++-
 databricks/sdk/service/tags.py         |   55 +-
 databricks/sdk/service/vectorsearch.py |   84 +-
 26 files changed, 11387 insertions(+), 2290 deletions(-)
 create mode 100755 databricks/sdk/service/dataquality.py

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2db0598a6..82cc21f8f 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-universe:/home/parth.bansal/universe
\ No newline at end of file
+universe:/home/parth.bansal/vn0/universe
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index e68ce1aca..9d89ea571 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -9,6 +9,7 @@ databricks/sdk/service/cleanrooms.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
 databricks/sdk/service/dashboards.py linguist-generated=true
 databricks/sdk/service/database.py linguist-generated=true
+databricks/sdk/service/dataquality.py linguist-generated=true
 databricks/sdk/service/files.py linguist-generated=true
 databricks/sdk/service/iam.py linguist-generated=true
 databricks/sdk/service/iamv2.py linguist-generated=true
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 101013cb5..4d42ed22a 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -21,6 +21,7 @@
 from databricks.sdk.service import compute as pkg_compute
 from databricks.sdk.service import dashboards as pkg_dashboards
 from databricks.sdk.service import database as pkg_database
+from databricks.sdk.service import dataquality as pkg_dataquality
 from databricks.sdk.service import files as pkg_files
 from databricks.sdk.service import iam as pkg_iam
 from databricks.sdk.service import iamv2 as pkg_iamv2
@@ -40,10 +41,10 @@
 from databricks.sdk.service import vectorsearch as pkg_vectorsearch
 from databricks.sdk.service import workspace as pkg_workspace
 from databricks.sdk.service.agentbricks import AgentBricksAPI
-from databricks.sdk.service.apps import AppsAPI
+from databricks.sdk.service.apps import AppsAPI, AppsSettingsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
                                             BudgetsAPI, LogDeliveryAPI,
-                                            UsageDashboardsAPI)
+                                            UsageDashboardsAPI, UsagePolicyAPI)
 from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
@@ -55,13 +56,13 @@
                                             ExternalMetadataAPI, FunctionsAPI,
                                             GrantsAPI, MetastoresAPI,
                                             ModelVersionsAPI, OnlineTablesAPI,
-                                            QualityMonitorsAPI,
+                                            PoliciesAPI, QualityMonitorsAPI,
                                             RegisteredModelsAPI,
-                                            RequestForAccessAPI,
-                                            ResourceQuotasAPI, SchemasAPI,
-                                            StorageCredentialsAPI,
+                                            ResourceQuotasAPI, RfaAPI,
+                                            SchemasAPI, StorageCredentialsAPI,
                                             SystemSchemasAPI,
                                             TableConstraintsAPI, TablesAPI,
+                                            TemporaryPathCredentialsAPI,
                                             TemporaryTableCredentialsAPI,
                                             VolumesAPI, WorkspaceBindingsAPI)
 from databricks.sdk.service.cleanrooms import (CleanRoomAssetRevisionsAPI,
@@ -79,17 +80,21 @@
 from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
                                                LakeviewEmbeddedAPI,
                                                QueryExecutionAPI)
-from databricks.sdk.service.database import DatabaseAPI
+from databricks.sdk.service.database import DatabaseAPI, DatabaseProjectAPI
+from databricks.sdk.service.dataquality import DataQualityAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccessControlAPI,
                                         AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
-                                        AccountGroupsAPI,
+                                        AccountGroupsAPI, AccountGroupsV2API,
                                         AccountServicePrincipalsAPI,
-                                        AccountUsersAPI, CurrentUserAPI,
-                                        GroupsAPI, PermissionMigrationAPI,
-                                        PermissionsAPI, ServicePrincipalsAPI,
-                                        UsersAPI, WorkspaceAssignmentAPI)
+                                        AccountServicePrincipalsV2API,
+                                        AccountUsersAPI, AccountUsersV2API,
+                                        CurrentUserAPI, GroupsAPI, GroupsV2API,
+                                        PermissionMigrationAPI, PermissionsAPI,
+                                        ServicePrincipalsAPI,
+                                        ServicePrincipalsV2API, UsersAPI,
+                                        UsersV2API, WorkspaceAssignmentAPI)
 from databricks.sdk.service.iamv2 import AccountIamV2API, WorkspaceIamV2API
 from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI
 from databricks.sdk.service.marketplace import (
@@ -98,8 +103,9 @@
     ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI,
     ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
     ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
-from databricks.sdk.service.ml import (ExperimentsAPI, FeatureStoreAPI,
-                                       ForecastingAPI, MaterializedFeaturesAPI,
+from 
databricks.sdk.service.ml import (ExperimentsAPI, FeatureEngineeringAPI, + FeatureStoreAPI, ForecastingAPI, + MaterializedFeaturesAPI, ModelRegistryAPI) from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI, CustomAppIntegrationAPI, @@ -257,6 +263,7 @@ def __init__( self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client) self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client) self._apps = pkg_apps.AppsAPI(self._api_client) + self._apps_settings = pkg_apps.AppsSettingsAPI(self._api_client) self._artifact_allowlists = pkg_catalog.ArtifactAllowlistsAPI(self._api_client) self._catalogs = pkg_catalog.CatalogsAPI(self._api_client) self._clean_room_asset_revisions = pkg_cleanrooms.CleanRoomAssetRevisionsAPI(self._api_client) @@ -278,8 +285,10 @@ def __init__( self._current_user = pkg_iam.CurrentUserAPI(self._api_client) self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client) self._dashboards = pkg_sql.DashboardsAPI(self._api_client) + self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client) self._data_sources = pkg_sql.DataSourcesAPI(self._api_client) self._database = pkg_database.DatabaseAPI(self._api_client) + self._database_project = pkg_database.DatabaseProjectAPI(self._api_client) self._dbfs = DbfsExt(self._api_client) self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client) self._entity_tag_assignments = pkg_catalog.EntityTagAssignmentsAPI(self._api_client) @@ -287,6 +296,7 @@ def __init__( self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client) self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client) self._external_metadata = pkg_catalog.ExternalMetadataAPI(self._api_client) + self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) self._functions = pkg_catalog.FunctionsAPI(self._api_client) @@ -294,7 +304,7 @@ def __init__( self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client) self._global_init_scripts = pkg_compute.GlobalInitScriptsAPI(self._api_client) self._grants = pkg_catalog.GrantsAPI(self._api_client) - self._groups = pkg_iam.GroupsAPI(self._api_client) + self._groups_v2 = pkg_iam.GroupsV2API(self._api_client) self._instance_pools = pkg_compute.InstancePoolsAPI(self._api_client) self._instance_profiles = pkg_compute.InstanceProfilesAPI(self._api_client) self._ip_access_lists = pkg_settings.IpAccessListsAPI(self._api_client) @@ -311,6 +321,7 @@ def __init__( self._permission_migration = pkg_iam.PermissionMigrationAPI(self._api_client) self._permissions = pkg_iam.PermissionsAPI(self._api_client) self._pipelines = pkg_pipelines.PipelinesAPI(self._api_client) + self._policies = pkg_catalog.PoliciesAPI(self._api_client) self._policy_compliance_for_clusters = pkg_compute.PolicyComplianceForClustersAPI(self._api_client) self._policy_compliance_for_jobs = pkg_jobs.PolicyComplianceForJobsAPI(self._api_client) self._policy_families = pkg_compute.PolicyFamiliesAPI(self._api_client) @@ -338,12 +349,12 @@ def __init__( self._redash_config = pkg_sql.RedashConfigAPI(self._api_client) self._registered_models = pkg_catalog.RegisteredModelsAPI(self._api_client) self._repos = pkg_workspace.ReposAPI(self._api_client) - self._request_for_access = pkg_catalog.RequestForAccessAPI(self._api_client) self._resource_quotas = pkg_catalog.ResourceQuotasAPI(self._api_client) + self._rfa = pkg_catalog.RfaAPI(self._api_client) self._schemas = 
pkg_catalog.SchemasAPI(self._api_client)
         self._secrets = pkg_workspace.SecretsAPI(self._api_client)
         self._service_principal_secrets_proxy = pkg_oauth2.ServicePrincipalSecretsProxyAPI(self._api_client)
-        self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client)
+        self._service_principals_v2 = pkg_iam.ServicePrincipalsV2API(self._api_client)
         self._serving_endpoints = serving_endpoints
         serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
             self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
@@ -360,10 +371,11 @@ def __init__(
         self._tables = pkg_catalog.TablesAPI(self._api_client)
         self._tag_assignments = pkg_tags.TagAssignmentsAPI(self._api_client)
         self._tag_policies = pkg_tags.TagPoliciesAPI(self._api_client)
+        self._temporary_path_credentials = pkg_catalog.TemporaryPathCredentialsAPI(self._api_client)
         self._temporary_table_credentials = pkg_catalog.TemporaryTableCredentialsAPI(self._api_client)
         self._token_management = pkg_settings.TokenManagementAPI(self._api_client)
         self._tokens = pkg_settings.TokensAPI(self._api_client)
-        self._users = pkg_iam.UsersAPI(self._api_client)
+        self._users_v2 = pkg_iam.UsersV2API(self._api_client)
         self._vector_search_endpoints = pkg_vectorsearch.VectorSearchEndpointsAPI(self._api_client)
         self._vector_search_indexes = pkg_vectorsearch.VectorSearchIndexesAPI(self._api_client)
         self._volumes = pkg_catalog.VolumesAPI(self._api_client)
@@ -374,6 +386,9 @@ def __init__(
         self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client)
         self._forecasting = pkg_ml.ForecastingAPI(self._api_client)
         self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client)
+        self._groups = pkg_iam.GroupsAPI(self._api_client)
+        self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client)
+        self._users = pkg_iam.UsersAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -422,6 +437,11 @@ def apps(self) -> pkg_apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
 
+    @property
+    def apps_settings(self) -> pkg_apps.AppsSettingsAPI:
+        """Apps Settings manage the settings for the Apps service on a customer's Databricks instance."""
+        return self._apps_settings
+
     @property
     def artifact_allowlists(self) -> pkg_catalog.ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
@@ -527,6 +547,11 @@ def dashboards(self) -> pkg_sql.DashboardsAPI:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
+    @property
+    def data_quality(self) -> pkg_dataquality.DataQualityAPI:
+        """Manage the data quality of Unity Catalog objects (currently supports `schema` and `table`)."""
+        return self._data_quality
+
     @property
     def data_sources(self) -> pkg_sql.DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
@@ -537,6 +562,11 @@ def database(self) -> pkg_database.DatabaseAPI:
         """Database Instances provide access to a database via REST API or direct SQL."""
         return self._database
 
+    @property
+    def database_project(self) -> pkg_database.DatabaseProjectAPI:
+        """Database Projects provide access to a database via REST API or direct SQL."""
+        return self._database_project
+
     @property
     def dbfs(self) -> DbfsExt:
         """DBFS 
API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" @@ -549,7 +579,7 @@ def dbsql_permissions(self) -> pkg_sql.DbsqlPermissionsAPI: @property def entity_tag_assignments(self) -> pkg_catalog.EntityTagAssignmentsAPI: - """Entity Tag Assignments provide a unified interface for managing tag assignments on Unity Catalog entities.""" + """Tags are attributes that include keys and optional values that you can use to organize and categorize entities in Unity Catalog.""" return self._entity_tag_assignments @property @@ -572,6 +602,11 @@ def external_metadata(self) -> pkg_catalog.ExternalMetadataAPI: """External Metadata objects enable customers to register and manage metadata about external systems within Unity Catalog.""" return self._external_metadata + @property + def feature_engineering(self) -> pkg_ml.FeatureEngineeringAPI: + """[description].""" + return self._feature_engineering + @property def feature_store(self) -> pkg_ml.FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features.""" @@ -608,9 +643,9 @@ def grants(self) -> pkg_catalog.GrantsAPI: return self._grants @property - def groups(self) -> pkg_iam.GroupsAPI: + def groups_v2(self) -> pkg_iam.GroupsV2API: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" - return self._groups + return self._groups_v2 @property def instance_pools(self) -> pkg_compute.InstancePoolsAPI: @@ -692,6 +727,11 @@ def pipelines(self) -> pkg_pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines + @property + def policies(self) -> pkg_catalog.PoliciesAPI: + """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies in Unity Catalog.""" + return self._policies + @property def policy_compliance_for_clusters(self) -> pkg_compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" @@ -817,16 +857,16 @@ def repos(self) -> pkg_workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos - @property - def request_for_access(self) -> pkg_catalog.RequestForAccessAPI: - """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.""" - return self._request_for_access - @property def resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" return self._resource_quotas + @property + def rfa(self) -> pkg_catalog.RfaAPI: + """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.""" + return self._rfa + @property def schemas(self) -> pkg_catalog.SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" @@ -843,9 +883,9 @@ def service_principal_secrets_proxy(self) -> pkg_oauth2.ServicePrincipalSecretsP return self._service_principal_secrets_proxy @property - def service_principals(self) -> pkg_iam.ServicePrincipalsAPI: + def service_principals_v2(self) -> pkg_iam.ServicePrincipalsV2API: """Identities for use with jobs, automated tools, and systems 
such as scripts, apps, and CI/CD platforms.""" - return self._service_principals + return self._service_principals_v2 @property def serving_endpoints(self) -> ServingEndpointsExt: @@ -899,12 +939,17 @@ def tag_assignments(self) -> pkg_tags.TagAssignmentsAPI: @property def tag_policies(self) -> pkg_tags.TagPoliciesAPI: - """The Tag Policy API allows you to manage tag policies in Databricks.""" + """The Tag Policy API allows you to manage policies for governed tags in Databricks.""" return self._tag_policies + @property + def temporary_path_credentials(self) -> pkg_catalog.TemporaryPathCredentialsAPI: + """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud storage locations registered in Databricks.""" + return self._temporary_path_credentials + @property def temporary_table_credentials(self) -> pkg_catalog.TemporaryTableCredentialsAPI: - """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.""" + """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks.""" return self._temporary_table_credentials @property @@ -918,9 +963,9 @@ def tokens(self) -> pkg_settings.TokensAPI: return self._tokens @property - def users(self) -> pkg_iam.UsersAPI: + def users_v2(self) -> pkg_iam.UsersV2API: """User identities recognized by Databricks and represented by email addresses.""" - return self._users + return self._users_v2 @property def vector_search_endpoints(self) -> pkg_vectorsearch.VectorSearchEndpointsAPI: @@ -972,6 +1017,21 @@ def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API: """These APIs are used to manage identities and the workspace access of these identities in .""" return self._workspace_iam_v2 + @property + def groups(self) -> pkg_iam.GroupsAPI: + """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" + return self._groups + + @property + def service_principals(self) -> pkg_iam.ServicePrincipalsAPI: + """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" + return self._service_principals + + @property + def users(self) -> pkg_iam.UsersAPI: + """User identities recognized by Databricks and represented by email addresses.""" + return self._users + def get_workspace_id(self) -> int: """Get the workspace ID of the workspace that this client is connected to.""" response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"]) @@ -1053,7 +1113,7 @@ def __init__( self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client) self._encryption_keys = pkg_provisioning.EncryptionKeysAPI(self._api_client) self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client) - self._groups = pkg_iam.AccountGroupsAPI(self._api_client) + self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client) self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client) self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client) self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client) @@ -1066,19 +1126,23 @@ def __init__( self._published_app_integration = pkg_oauth2.PublishedAppIntegrationAPI(self._api_client) self._service_principal_federation_policy = 
pkg_oauth2.ServicePrincipalFederationPolicyAPI(self._api_client)
         self._service_principal_secrets = pkg_oauth2.ServicePrincipalSecretsAPI(self._api_client)
-        self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client)
+        self._service_principals_v2 = pkg_iam.AccountServicePrincipalsV2API(self._api_client)
         self._settings = pkg_settings.AccountSettingsAPI(self._api_client)
         self._settings_v2 = pkg_settingsv2.AccountSettingsV2API(self._api_client)
         self._storage = pkg_provisioning.StorageAPI(self._api_client)
         self._storage_credentials = pkg_catalog.AccountStorageCredentialsAPI(self._api_client)
         self._usage_dashboards = pkg_billing.UsageDashboardsAPI(self._api_client)
-        self._users = pkg_iam.AccountUsersAPI(self._api_client)
+        self._usage_policy = pkg_billing.UsagePolicyAPI(self._api_client)
+        self._users_v2 = pkg_iam.AccountUsersV2API(self._api_client)
         self._vpc_endpoints = pkg_provisioning.VpcEndpointsAPI(self._api_client)
         self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client)
         self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client)
         self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client)
         self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client)
         self._budgets = pkg_billing.BudgetsAPI(self._api_client)
+        self._groups = pkg_iam.AccountGroupsAPI(self._api_client)
+        self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client)
+        self._users = pkg_iam.AccountUsersAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -1124,9 +1188,9 @@ def federation_policy(self) -> pkg_oauth2.AccountFederationPolicyAPI:
         return self._federation_policy
 
     @property
-    def groups(self) -> pkg_iam.AccountGroupsAPI:
+    def groups_v2(self) -> pkg_iam.AccountGroupsV2API:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
-        return self._groups
+        return self._groups_v2
 
     @property
     def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI:
@@ -1189,9 +1253,9 @@ def service_principal_secrets(self) -> pkg_oauth2.ServicePrincipalSecretsAPI:
         return self._service_principal_secrets
 
     @property
-    def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI:
+    def service_principals_v2(self) -> pkg_iam.AccountServicePrincipalsV2API:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
-        return self._service_principals
+        return self._service_principals_v2
 
     @property
     def settings(self) -> pkg_settings.AccountSettingsAPI:
@@ -1219,9 +1283,14 @@ def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI:
         return self._usage_dashboards
 
     @property
-    def users(self) -> pkg_iam.AccountUsersAPI:
+    def usage_policy(self) -> pkg_billing.UsagePolicyAPI:
+        """A service that serves the REST API for usage policies."""
+        return self._usage_policy
+
+    @property
+    def users_v2(self) -> pkg_iam.AccountUsersV2API:
         """User identities recognized by Databricks and represented by email addresses."""
-        return self._users
+        return self._users_v2
 
     @property
     def vpc_endpoints(self) -> pkg_provisioning.VpcEndpointsAPI:
@@ -1253,6 +1322,21 @@ def budgets(self) -> pkg_billing.BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
 
+    @property
+    def groups(self) -> pkg_iam.AccountGroupsAPI:
+        """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
+        return self._groups
+
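# --- Illustrative sketch (not part of the generated diff): the hunk above splits the
# account-level SCIM accessors into v2 properties (`groups_v2`, `service_principals_v2`,
# `users_v2`) while re-adding the legacy properties for backwards compatibility. A
# minimal sketch, assuming the v2 APIs keep the SCIM-style list() used elsewhere in
# this SDK and that account credentials are configured via environment variables:
from databricks.sdk import AccountClient

a = AccountClient()

# Preferred accessor after this change:
for group in a.groups_v2.list():
    print(group.display_name)

# The legacy accessor still works and points at the original SCIM implementation:
for group in a.groups.list():
    print(group.display_name)
# --- end sketch ---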
+    @property
+    def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI:
+        """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
+        return self._service_principals
+
+    @property
+    def users(self) -> pkg_iam.AccountUsersAPI:
+        """User identities recognized by Databricks and represented by email addresses."""
+        return self._users
+
     def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
         """Constructs a ``WorkspaceClient`` for the given workspace.
 
diff --git a/databricks/sdk/service/agentbricks.py b/databricks/sdk/service/agentbricks.py
index 8cda7ac26..25175acf0 100755
--- a/databricks/sdk/service/agentbricks.py
+++ b/databricks/sdk/service/agentbricks.py
@@ -227,9 +227,9 @@ def create_custom_llm(
         :param instructions: str
           Instructions for the custom LLM to follow
         :param agent_artifact_path: str (optional)
-          Optional: UC path for agent artifacts. If you are using a dataset that you only have read
-          permissions, please provide a destination path where you have write permissions. Please provide this
-          in catalog.schema format.
+          This will soon be deprecated. Optional: UC path for agent artifacts. If you are using a dataset
+          for which you only have read permissions, please provide a destination path where you have write
+          permissions. Please provide this in catalog.schema format.
         :param datasets: List[:class:`Dataset`] (optional)
           Datasets used for training and evaluating the model, not for inference. Currently, only 1 dataset
           is accepted.
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 99d60a4dd..9e6ed6716 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -32,8 +32,8 @@ class App:
     app_status: Optional[ApplicationStatus] = None
 
     budget_policy_id: Optional[str] = None
-    """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the
-    new usage_policy_id is properly populated and used."""
+
+    compute_size: Optional[ComputeSize] = None
 
     compute_status: Optional[ComputeStatus] = None
@@ -51,8 +51,6 @@ class App:
     """The description of the app."""
 
     effective_budget_policy_id: Optional[str] = None
-    """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the
-    new usage_policy_id is properly populated and used."""
 
     effective_usage_policy_id: Optional[str] = None
@@ -101,6 +99,8 @@ def as_dict(self) -> dict:
             body["app_status"] = self.app_status.as_dict()
         if self.budget_policy_id is not None:
             body["budget_policy_id"] = self.budget_policy_id
+        if self.compute_size is not None:
+            body["compute_size"] = self.compute_size.value
         if self.compute_status:
             body["compute_status"] = self.compute_status.as_dict()
         if self.create_time is not None:
@@ -156,6 +156,8 @@ def as_shallow_dict(self) -> dict:
             body["app_status"] = self.app_status
         if self.budget_policy_id is not None:
             body["budget_policy_id"] = self.budget_policy_id
+        if self.compute_size is not None:
+            body["compute_size"] = self.compute_size
         if self.compute_status:
             body["compute_status"] = self.compute_status
         if self.create_time is not None:
@@ -209,6 +211,7 @@ def from_dict(cls, d: Dict[str, Any]) -> App:
             active_deployment=_from_dict(d, "active_deployment", AppDeployment),
             app_status=_from_dict(d, "app_status", ApplicationStatus),
             budget_policy_id=d.get("budget_policy_id", None),
+            compute_size=_enum(d, "compute_size", ComputeSize),
             compute_status=_from_dict(d, "compute_status", ComputeStatus),
             create_time=d.get("create_time", None),
creator=d.get("creator", None), @@ -501,6 +504,312 @@ def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentStatus: return cls(message=d.get("message", None), state=_enum(d, "state", AppDeploymentState)) +@dataclass +class AppManifest: + """App manifest definition""" + + version: int + """The manifest schema version, for now only 1 is allowed""" + + name: str + """Name of the app defined by manifest author / publisher""" + + description: Optional[str] = None + """Description of the app defined by manifest author / publisher""" + + resource_specs: Optional[List[AppManifestAppResourceSpec]] = None + + def as_dict(self) -> dict: + """Serializes the AppManifest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.resource_specs: + body["resource_specs"] = [v.as_dict() for v in self.resource_specs] + if self.version is not None: + body["version"] = self.version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.resource_specs: + body["resource_specs"] = self.resource_specs + if self.version is not None: + body["version"] = self.version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifest: + """Deserializes the AppManifest from a dictionary.""" + return cls( + description=d.get("description", None), + name=d.get("name", None), + resource_specs=_repeated_dict(d, "resource_specs", AppManifestAppResourceSpec), + version=d.get("version", None), + ) + + +@dataclass +class AppManifestAppResourceJobSpec: + permission: AppManifestAppResourceJobSpecJobPermission + """Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", + "CAN_MANAGE_RUN", "CAN_VIEW".""" + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceJobSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceJobSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceJobSpec: + """Deserializes the AppManifestAppResourceJobSpec from a dictionary.""" + return cls(permission=_enum(d, "permission", AppManifestAppResourceJobSpecJobPermission)) + + +class AppManifestAppResourceJobSpecJobPermission(Enum): + + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_RUN = "CAN_MANAGE_RUN" + CAN_VIEW = "CAN_VIEW" + IS_OWNER = "IS_OWNER" + + +@dataclass +class AppManifestAppResourceSecretSpec: + permission: AppManifestAppResourceSecretSpecSecretPermission + """Permission to grant on the secret scope. For secrets, only one permission is allowed. 
Permission + must be one of: "READ", "WRITE", "MANAGE".""" + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceSecretSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceSecretSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSecretSpec: + """Deserializes the AppManifestAppResourceSecretSpec from a dictionary.""" + return cls(permission=_enum(d, "permission", AppManifestAppResourceSecretSpecSecretPermission)) + + +class AppManifestAppResourceSecretSpecSecretPermission(Enum): + """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE".""" + + MANAGE = "MANAGE" + READ = "READ" + WRITE = "WRITE" + + +@dataclass +class AppManifestAppResourceServingEndpointSpec: + permission: AppManifestAppResourceServingEndpointSpecServingEndpointPermission + """Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", + "CAN_QUERY", "CAN_VIEW".""" + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceServingEndpointSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceServingEndpointSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceServingEndpointSpec: + """Deserializes the AppManifestAppResourceServingEndpointSpec from a dictionary.""" + return cls( + permission=_enum(d, "permission", AppManifestAppResourceServingEndpointSpecServingEndpointPermission) + ) + + +class AppManifestAppResourceServingEndpointSpecServingEndpointPermission(Enum): + + CAN_MANAGE = "CAN_MANAGE" + CAN_QUERY = "CAN_QUERY" + CAN_VIEW = "CAN_VIEW" + + +@dataclass +class AppManifestAppResourceSpec: + """AppResource related fields are copied from app.proto but excludes resource identifiers (e.g. 
+ name, id, key, scope, etc.)""" + + name: str + """Name of the App Resource.""" + + description: Optional[str] = None + """Description of the App Resource.""" + + job_spec: Optional[AppManifestAppResourceJobSpec] = None + + secret_spec: Optional[AppManifestAppResourceSecretSpec] = None + + serving_endpoint_spec: Optional[AppManifestAppResourceServingEndpointSpec] = None + + sql_warehouse_spec: Optional[AppManifestAppResourceSqlWarehouseSpec] = None + + uc_securable_spec: Optional[AppManifestAppResourceUcSecurableSpec] = None + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: + body["description"] = self.description + if self.job_spec: + body["job_spec"] = self.job_spec.as_dict() + if self.name is not None: + body["name"] = self.name + if self.secret_spec: + body["secret_spec"] = self.secret_spec.as_dict() + if self.serving_endpoint_spec: + body["serving_endpoint_spec"] = self.serving_endpoint_spec.as_dict() + if self.sql_warehouse_spec: + body["sql_warehouse_spec"] = self.sql_warehouse_spec.as_dict() + if self.uc_securable_spec: + body["uc_securable_spec"] = self.uc_securable_spec.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: + body["description"] = self.description + if self.job_spec: + body["job_spec"] = self.job_spec + if self.name is not None: + body["name"] = self.name + if self.secret_spec: + body["secret_spec"] = self.secret_spec + if self.serving_endpoint_spec: + body["serving_endpoint_spec"] = self.serving_endpoint_spec + if self.sql_warehouse_spec: + body["sql_warehouse_spec"] = self.sql_warehouse_spec + if self.uc_securable_spec: + body["uc_securable_spec"] = self.uc_securable_spec + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSpec: + """Deserializes the AppManifestAppResourceSpec from a dictionary.""" + return cls( + description=d.get("description", None), + job_spec=_from_dict(d, "job_spec", AppManifestAppResourceJobSpec), + name=d.get("name", None), + secret_spec=_from_dict(d, "secret_spec", AppManifestAppResourceSecretSpec), + serving_endpoint_spec=_from_dict(d, "serving_endpoint_spec", AppManifestAppResourceServingEndpointSpec), + sql_warehouse_spec=_from_dict(d, "sql_warehouse_spec", AppManifestAppResourceSqlWarehouseSpec), + uc_securable_spec=_from_dict(d, "uc_securable_spec", AppManifestAppResourceUcSecurableSpec), + ) + + +@dataclass +class AppManifestAppResourceSqlWarehouseSpec: + permission: AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission + """Permission to grant on the SQL warehouse. 
Supported permissions are: "CAN_MANAGE", "CAN_USE", + "IS_OWNER".""" + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceSqlWarehouseSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceSqlWarehouseSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceSqlWarehouseSpec: + """Deserializes the AppManifestAppResourceSqlWarehouseSpec from a dictionary.""" + return cls(permission=_enum(d, "permission", AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission)) + + +class AppManifestAppResourceSqlWarehouseSpecSqlWarehousePermission(Enum): + + CAN_MANAGE = "CAN_MANAGE" + CAN_USE = "CAN_USE" + IS_OWNER = "IS_OWNER" + + +@dataclass +class AppManifestAppResourceUcSecurableSpec: + securable_type: AppManifestAppResourceUcSecurableSpecUcSecurableType + + permission: AppManifestAppResourceUcSecurableSpecUcSecurablePermission + + def as_dict(self) -> dict: + """Serializes the AppManifestAppResourceUcSecurableSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission.value + if self.securable_type is not None: + body["securable_type"] = self.securable_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppManifestAppResourceUcSecurableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission is not None: + body["permission"] = self.permission + if self.securable_type is not None: + body["securable_type"] = self.securable_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppManifestAppResourceUcSecurableSpec: + """Deserializes the AppManifestAppResourceUcSecurableSpec from a dictionary.""" + return cls( + permission=_enum(d, "permission", AppManifestAppResourceUcSecurableSpecUcSecurablePermission), + securable_type=_enum(d, "securable_type", AppManifestAppResourceUcSecurableSpecUcSecurableType), + ) + + +class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum): + + MANAGE = "MANAGE" + READ_VOLUME = "READ_VOLUME" + WRITE_VOLUME = "WRITE_VOLUME" + + +class AppManifestAppResourceUcSecurableSpecUcSecurableType(Enum): + + VOLUME = "VOLUME" + + @dataclass class AppPermission: inherited: Optional[bool] = None @@ -630,6 +939,8 @@ class AppResource: description: Optional[str] = None """Description of the App Resource.""" + genie_space: Optional[AppResourceGenieSpace] = None + job: Optional[AppResourceJob] = None secret: Optional[AppResourceSecret] = None @@ -647,6 +958,8 @@ def as_dict(self) -> dict: body["database"] = self.database.as_dict() if self.description is not None: body["description"] = self.description + if self.genie_space: + body["genie_space"] = self.genie_space.as_dict() if self.job: body["job"] = self.job.as_dict() if self.name is not None: @@ -668,6 +981,8 @@ def as_shallow_dict(self) -> dict: body["database"] = self.database if self.description is not None: body["description"] = self.description + if self.genie_space: + body["genie_space"] = self.genie_space if self.job: body["job"] = self.job if self.name is not None: @@ -688,6 +1003,7 @@ def from_dict(cls, d: Dict[str, Any]) -> 
AppResource: return cls( database=_from_dict(d, "database", AppResourceDatabase), description=d.get("description", None), + genie_space=_from_dict(d, "genie_space", AppResourceGenieSpace), job=_from_dict(d, "job", AppResourceJob), name=d.get("name", None), secret=_from_dict(d, "secret", AppResourceSecret), @@ -742,6 +1058,54 @@ class AppResourceDatabaseDatabasePermission(Enum): CAN_CONNECT_AND_CREATE = "CAN_CONNECT_AND_CREATE" +@dataclass +class AppResourceGenieSpace: + name: str + + space_id: str + + permission: AppResourceGenieSpaceGenieSpacePermission + + def as_dict(self) -> dict: + """Serializes the AppResourceGenieSpace into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission.value + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceGenieSpace into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission + if self.space_id is not None: + body["space_id"] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppResourceGenieSpace: + """Deserializes the AppResourceGenieSpace from a dictionary.""" + return cls( + name=d.get("name", None), + permission=_enum(d, "permission", AppResourceGenieSpaceGenieSpacePermission), + space_id=d.get("space_id", None), + ) + + +class AppResourceGenieSpaceGenieSpacePermission(Enum): + + CAN_EDIT = "CAN_EDIT" + CAN_MANAGE = "CAN_MANAGE" + CAN_RUN = "CAN_RUN" + CAN_VIEW = "CAN_VIEW" + + @dataclass class AppResourceJob: id: str @@ -971,6 +1335,112 @@ class AppResourceUcSecurableUcSecurableType(Enum): VOLUME = "VOLUME" +@dataclass +class AppUpdate: + budget_policy_id: Optional[str] = None + + compute_size: Optional[ComputeSize] = None + + description: Optional[str] = None + + resources: Optional[List[AppResource]] = None + + status: Optional[AppUpdateUpdateStatus] = None + + usage_policy_id: Optional[str] = None + + user_api_scopes: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the AppUpdate into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size.value + if self.description is not None: + body["description"] = self.description + if self.resources: + body["resources"] = [v.as_dict() for v in self.resources] + if self.status: + body["status"] = self.status.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id + if self.user_api_scopes: + body["user_api_scopes"] = [v for v in self.user_api_scopes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppUpdate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_size is not None: + body["compute_size"] = self.compute_size + if self.description is not None: + body["description"] = self.description + if self.resources: + body["resources"] = self.resources + if self.status: + body["status"] = self.status + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id + if 
self.user_api_scopes: + body["user_api_scopes"] = self.user_api_scopes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppUpdate: + """Deserializes the AppUpdate from a dictionary.""" + return cls( + budget_policy_id=d.get("budget_policy_id", None), + compute_size=_enum(d, "compute_size", ComputeSize), + description=d.get("description", None), + resources=_repeated_dict(d, "resources", AppResource), + status=_from_dict(d, "status", AppUpdateUpdateStatus), + usage_policy_id=d.get("usage_policy_id", None), + user_api_scopes=d.get("user_api_scopes", None), + ) + + +@dataclass +class AppUpdateUpdateStatus: + message: Optional[str] = None + + state: Optional[AppUpdateUpdateStatusUpdateState] = None + + def as_dict(self) -> dict: + """Serializes the AppUpdateUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppUpdateUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppUpdateUpdateStatus: + """Deserializes the AppUpdateUpdateStatus from a dictionary.""" + return cls(message=d.get("message", None), state=_enum(d, "state", AppUpdateUpdateStatusUpdateState)) + + +class AppUpdateUpdateStatusUpdateState(Enum): + + FAILED = "FAILED" + IN_PROGRESS = "IN_PROGRESS" + NOT_UPDATED = "NOT_UPDATED" + SUCCEEDED = "SUCCEEDED" + + class ApplicationState(Enum): CRASHED = "CRASHED" @@ -1011,6 +1481,12 @@ def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: return cls(message=d.get("message", None), state=_enum(d, "state", ApplicationState)) +class ComputeSize(Enum): + + LARGE = "LARGE" + MEDIUM = "MEDIUM" + + class ComputeState(Enum): ACTIVE = "ACTIVE" @@ -1054,6 +1530,81 @@ def from_dict(cls, d: Dict[str, Any]) -> ComputeStatus: return cls(message=d.get("message", None), state=_enum(d, "state", ComputeState)) +@dataclass +class CustomTemplate: + name: str + """The name of the template. It must contain only alphanumeric characters, hyphens, underscores, + and whitespaces. It must be unique within the workspace.""" + + git_repo: str + """The Git repository URL that the template resides in.""" + + path: str + """The path to the template within the Git repository.""" + + manifest: AppManifest + """The manifest of the template. 
It defines fields and default values when installing the template.""" + + git_provider: str + """The Git provider of the template.""" + + creator: Optional[str] = None + + description: Optional[str] = None + """The description of the template.""" + + def as_dict(self) -> dict: + """Serializes the CustomTemplate into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_repo is not None: + body["git_repo"] = self.git_repo + if self.manifest: + body["manifest"] = self.manifest.as_dict() + if self.name is not None: + body["name"] = self.name + if self.path is not None: + body["path"] = self.path + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomTemplate into a shallow dictionary of its immediate attributes.""" + body = {} + if self.creator is not None: + body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_repo is not None: + body["git_repo"] = self.git_repo + if self.manifest: + body["manifest"] = self.manifest + if self.name is not None: + body["name"] = self.name + if self.path is not None: + body["path"] = self.path + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomTemplate: + """Deserializes the CustomTemplate from a dictionary.""" + return cls( + creator=d.get("creator", None), + description=d.get("description", None), + git_provider=d.get("git_provider", None), + git_repo=d.get("git_repo", None), + manifest=_from_dict(d, "manifest", AppManifest), + name=d.get("name", None), + path=d.get("path", None), + ) + + @dataclass class GetAppPermissionLevelsResponse: permission_levels: Optional[List[AppPermissionsDescription]] = None @@ -1145,6 +1696,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) +@dataclass +class ListCustomTemplatesResponse: + next_page_token: Optional[str] = None + """Pagination token to request the next page of custom templates.""" + + templates: Optional[List[CustomTemplate]] = None + + def as_dict(self) -> dict: + """Serializes the ListCustomTemplatesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.templates: + body["templates"] = [v.as_dict() for v in self.templates] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListCustomTemplatesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.templates: + body["templates"] = self.templates + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListCustomTemplatesResponse: + """Deserializes the ListCustomTemplatesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), templates=_repeated_dict(d, "templates", CustomTemplate) + ) + + class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -1186,6 +1770,37 
@@ def wait_get_app_active( attempt += 1 raise TimeoutError(f"timed out after {timeout}: {status_message}") + def wait_get_update_app_succeeded( + self, app_name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[AppUpdate], None]] = None + ) -> AppUpdate: + deadline = time.time() + timeout.total_seconds() + target_states = (AppUpdateUpdateStatusUpdateState.SUCCEEDED,) + failure_states = (AppUpdateUpdateStatusUpdateState.FAILED,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get_update(app_name=app_name) + status = poll.status.state + status_message = f"current status: {status}" + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"app_name={app_name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + def wait_get_deployment_app_succeeded( self, app_name: str, @@ -1278,6 +1893,45 @@ def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App: return self.create(app=app, no_compute=no_compute).result(timeout=timeout) + def create_update(self, app_name: str, update_mask: str, *, app: Optional[App] = None) -> Wait[AppUpdate]: + """Creates an app update and starts the update process. The update process is asynchronous and the status + of the update can be checked with the GetAppUpdate method. + + :param app_name: str + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + :param app: :class:`App` (optional) + + :returns: + Long-running operation waiter for :class:`AppUpdate`. + See :method:wait_get_update_app_succeeded for more details. + """ + body = {} + if app is not None: + body["app"] = app.as_dict() + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/apps/{app_name}/update", body=body, headers=headers) + return Wait(self.wait_get_update_app_succeeded, response=AppUpdate.from_dict(op_response), app_name=app_name) + + def create_update_and_wait( + self, app_name: str, update_mask: str, *, app: Optional[App] = None, timeout=timedelta(minutes=20) + ) -> AppUpdate: + return self.create_update(app=app, app_name=app_name, update_mask=update_mask).result(timeout=timeout) + def delete(self, name: str) -> App: """Deletes an app. 
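# --- Illustrative sketch (not part of the generated diff): a minimal example of the
# new app-update flow added above (create_update / get_update plus the
# wait_get_update_app_succeeded poller). The app name and description below are
# hypothetical placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()

# Start an update that changes only the description, then block until the update
# reaches the SUCCEEDED state (OperationFailed or TimeoutError is raised otherwise).
update = w.apps.create_update_and_wait(
    app_name="my-app",  # hypothetical app name
    update_mask="description",  # only the listed field is replaced
    app=App(name="my-app", description="updated description"),
)
print(update.status.state)  # AppUpdateUpdateStatusUpdateState.SUCCEEDED
# --- end sketch ---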
@@ -1391,6 +2045,22 @@ def get_permissions(self, app_name: str) -> AppPermissions: res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}", headers=headers) return AppPermissions.from_dict(res) + def get_update(self, app_name: str) -> AppUpdate: + """Gets the status of an app update. + + :param app_name: str + The name of the app. + + :returns: :class:`AppUpdate` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/apps/{app_name}/update", headers=headers) + return AppUpdate.from_dict(res) + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: """Lists all apps in the workspace. @@ -1560,3 +2230,108 @@ def update_permissions( res = self._api.do("PATCH", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) return AppPermissions.from_dict(res) + + +class AppsSettingsAPI: + """Apps Settings manage the settings for the Apps service on a customer's Databricks instance.""" + + def __init__(self, api_client): + self._api = api_client + + def create_custom_template(self, template: CustomTemplate) -> CustomTemplate: + """Creates a custom template. + + :param template: :class:`CustomTemplate` + + :returns: :class:`CustomTemplate` + """ + body = template.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/apps-settings/templates", body=body, headers=headers) + return CustomTemplate.from_dict(res) + + def delete_custom_template(self, name: str) -> CustomTemplate: + """Deletes the custom template with the specified name. + + :param name: str + The name of the custom template. + + :returns: :class:`CustomTemplate` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("DELETE", f"/api/2.0/apps-settings/templates/{name}", headers=headers) + return CustomTemplate.from_dict(res) + + def get_custom_template(self, name: str) -> CustomTemplate: + """Gets the custom template with the specified name. + + :param name: str + The name of the custom template. + + :returns: :class:`CustomTemplate` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/apps-settings/templates/{name}", headers=headers) + return CustomTemplate.from_dict(res) + + def list_custom_templates( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[CustomTemplate]: + """Lists all custom templates in the workspace. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of custom templates. Requests first page if absent. + + :returns: Iterator over :class:`CustomTemplate` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/apps-settings/templates", query=query, headers=headers) + if "templates" in json: + for v in json["templates"]: + yield CustomTemplate.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_custom_template(self, name: str, template: CustomTemplate) -> CustomTemplate: + """Updates the custom template with the specified name. Note that the template name cannot be updated. + + :param name: str + The name of the template. 
It must contain only alphanumeric characters, hyphens, underscores, and + whitespaces. It must be unique within the workspace. + :param template: :class:`CustomTemplate` + + :returns: :class:`CustomTemplate` + """ + body = template.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/apps-settings/templates/{name}", body=body, headers=headers) + return CustomTemplate.from_dict(res) diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 2e118457a..c349adbee 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1034,6 +1034,50 @@ def from_dict(cls, d: Dict[str, Any]) -> ListBudgetPoliciesResponse: ) +@dataclass +class ListUsagePoliciesResponse: + """A list of usage policies.""" + + next_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the next page.""" + + policies: Optional[List[UsagePolicy]] = None + + previous_page_token: Optional[str] = None + """A token that can be sent as `page_token` to retrieve the previous page.""" + + def as_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsagePoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsagePoliciesResponse: + """Deserializes the ListUsagePoliciesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + policies=_repeated_dict(d, "policies", UsagePolicy), + previous_page_token=d.get("previous_page_token", None), + ) + + class LogDeliveryConfigStatus(Enum): """* Log Delivery Status @@ -1434,6 +1478,59 @@ class UsageDashboardType(Enum): USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" +@dataclass +class UsagePolicy: + """Contains the UsagePolicy details (same structure as BudgetPolicy)""" + + binding_workspace_ids: Optional[List[int]] = None + """List of workspaces that this usage policy will be exclusively bound to.""" + + custom_tags: Optional[List[compute.CustomPolicyTag]] = None + """A list of tags defined by the customer. At most 20 entries are allowed per policy.""" + + policy_id: Optional[str] = None + """The Id of the policy. 
This field is generated by Databricks and globally unique."""
+
+    policy_name: Optional[str] = None
+    """The name of the policy."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UsagePolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.binding_workspace_ids:
+            body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids]
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UsagePolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.binding_workspace_ids:
+            body["binding_workspace_ids"] = self.binding_workspace_ids
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UsagePolicy:
+        """Deserializes the UsagePolicy from a dictionary."""
+        return cls(
+            binding_workspace_ids=d.get("binding_workspace_ids", None),
+            custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
+            policy_id=d.get("policy_id", None),
+            policy_name=d.get("policy_name", None),
+        )
+
+
 @dataclass
 class WrappedLogDeliveryConfiguration:
     log_delivery_configuration: Optional[LogDeliveryConfiguration] = None
@@ -2065,3 +2162,144 @@ def get(
         res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/dashboard", query=query, headers=headers)
         return GetBillingUsageDashboardResponse.from_dict(res)
+
+
+class UsagePolicyAPI:
+    """A service that serves the REST API for usage policies."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, policy: Optional[UsagePolicy] = None, request_id: Optional[str] = None) -> UsagePolicy:
+        """Creates a new usage policy.
+
+        :param policy: :class:`UsagePolicy` (optional)
+          The policy to create. `policy_id` needs to be empty as it will be generated.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters.
+
+        :returns: :class:`UsagePolicy`
+        """
+        body = {}
+        if policy is not None:
+            body["policy"] = policy.as_dict()
+        if request_id is not None:
+            body["request_id"] = request_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", body=body, headers=headers
+        )
+        return UsagePolicy.from_dict(res)
+
+    def delete(self, policy_id: str):
+        """Deletes a usage policy
+
+        :param policy_id: str
+          The Id of the policy.
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers)
+
+    def get(self, policy_id: str) -> UsagePolicy:
+        """Retrieves a usage policy by its ID.
+
+        :param policy_id: str
+          The Id of the policy.
+
+        :returns: :class:`UsagePolicy`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}", headers=headers
+        )
+        return UsagePolicy.from_dict(res)
+
+    def list(
+        self,
+        *,
+        filter_by: Optional[Filter] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        sort_spec: Optional[SortSpec] = None,
+    ) -> Iterator[UsagePolicy]:
+        """Lists all usage policies. Policies are returned in ascending alphabetical order by name.
+
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of usage policies to return.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListUsagePolicies` call.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+
+        :returns: Iterator over :class:`UsagePolicy`
+        """
+
+        query = {}
+        if filter_by is not None:
+            query["filter_by"] = filter_by.as_dict()
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if sort_spec is not None:
+            query["sort_spec"] = sort_spec.as_dict()
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET", f"/api/2.1/accounts/{self._api.account_id}/usage-policies", query=query, headers=headers
+            )
+            if "policies" in json:
+                for v in json["policies"]:
+                    yield UsagePolicy.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def update(self, policy_id: str, policy: UsagePolicy, *, limit_config: Optional[LimitConfig] = None) -> UsagePolicy:
+        """Updates a usage policy.
+
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param policy: :class:`UsagePolicy`
+          The policy to update. `creator_user_id` cannot be specified in the request.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This field is redundant because LimitConfig is part of the UsagePolicy.
+
+        :returns: :class:`UsagePolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if limit_config is not None:
+            query["limit_config"] = limit_config.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/accounts/{self._api.account_id}/usage-policies/{policy_id}",
+            query=query,
+            body=body,
+            headers=headers,
+        )
+        return UsagePolicy.from_dict(res)
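A minimal usage sketch for the new service. The accessor name `usage_policies` on `AccountClient` is an assumption here; the generated client property is wired up outside this hunk:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.billing import UsagePolicy

    a = AccountClient()  # reads account credentials from the environment

    # `a.usage_policies` is an assumed accessor name for UsagePolicyAPI.
    created = a.usage_policies.create(
        policy=UsagePolicy(policy_name="ml-team-quota", binding_workspace_ids=[12345])
    )
    print(created.policy_id)  # generated by Databricks

    # list() yields UsagePolicy objects and follows next_page_token transparently.
    for p in a.usage_policies.list(page_size=50):
        print(p.policy_id, p.policy_name)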
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 550e1103c..0686cb640 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -63,8 +63,191 @@ def from_dict(cls, d: Dict[str, Any]) -> AccessRequestDestinations:
     )
 
 
+@dataclass
+class AccountsCreateMetastoreAssignmentResponse:
+    """The metastore assignment was successfully created."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignmentResponse:
+        """Deserializes the AccountsCreateMetastoreAssignmentResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class AccountsCreateMetastoreResponse:
+    metastore_info: Optional[MetastoreInfo] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreResponse:
+        """Deserializes the AccountsCreateMetastoreResponse from a dictionary."""
+        return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo))
+
+
+@dataclass
+class AccountsCreateStorageCredentialInfo:
+    credential_info: Optional[StorageCredentialInfo] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountsCreateStorageCredentialInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credential_info:
+            body["credential_info"] = self.credential_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateStorageCredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info:
+            body["credential_info"] = self.credential_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredentialInfo:
+        """Deserializes the AccountsCreateStorageCredentialInfo from a dictionary."""
+        return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo))
+
+
+@dataclass
+class AccountsDeleteMetastoreAssignmentResponse:
+    """The metastore assignment was successfully deleted."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountsDeleteMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def 
as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteMetastoreAssignmentResponse: + """Deserializes the AccountsDeleteMetastoreAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsDeleteMetastoreResponse: + """The metastore was successfully deleted.""" + + def as_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteMetastoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteMetastoreResponse: + """Deserializes the AccountsDeleteMetastoreResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsDeleteStorageCredentialResponse: + """The storage credential was successfully deleted.""" + + def as_dict(self) -> dict: + """Serializes the AccountsDeleteStorageCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsDeleteStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsDeleteStorageCredentialResponse: + """Deserializes the AccountsDeleteStorageCredentialResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsGetMetastoreResponse: + """The metastore was successfully returned.""" + + metastore_info: Optional[MetastoreInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsGetMetastoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsGetMetastoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastore_info: + body["metastore_info"] = self.metastore_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsGetMetastoreResponse: + """Deserializes the AccountsGetMetastoreResponse from a dictionary.""" + return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) + + +@dataclass +class AccountsListMetastoresResponse: + """Metastores were returned successfully.""" + + metastores: Optional[List[MetastoreInfo]] = None + """An array of metastore information objects.""" + + def as_dict(self) -> dict: + """Serializes the AccountsListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastores: + body["metastores"] = [v.as_dict() for v in self.metastores] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsListMetastoresResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastores: + body["metastores"] = self.metastores + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsListMetastoresResponse: + """Deserializes the AccountsListMetastoresResponse from a dictionary.""" + return cls(metastores=_repeated_dict(d, "metastores", MetastoreInfo)) + + @dataclass class 
AccountsMetastoreAssignment: + """The workspace metastore assignment was successfully returned.""" + metastore_assignment: Optional[MetastoreAssignment] = None def as_dict(self) -> dict: @@ -88,50 +271,100 @@ def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreAssignment: @dataclass -class AccountsMetastoreInfo: +class AccountsStorageCredentialInfo: + """The storage credential was successfully retrieved.""" + + credential_info: Optional[StorageCredentialInfo] = None + + def as_dict(self) -> dict: + """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_info: + body["credential_info"] = self.credential_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: + """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" + return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) + + +@dataclass +class AccountsUpdateMetastoreAssignmentResponse: + """The metastore assignment was successfully updated.""" + + def as_dict(self) -> dict: + """Serializes the AccountsUpdateMetastoreAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountsUpdateMetastoreAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignmentResponse: + """Deserializes the AccountsUpdateMetastoreAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class AccountsUpdateMetastoreResponse: + """The metastore update request succeeded.""" + metastore_info: Optional[MetastoreInfo] = None def as_dict(self) -> dict: - """Serializes the AccountsMetastoreInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the AccountsUpdateMetastoreResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.metastore_info: body["metastore_info"] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the AccountsUpdateMetastoreResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.metastore_info: body["metastore_info"] = self.metastore_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreInfo: - """Deserializes the AccountsMetastoreInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreResponse: + """Deserializes the AccountsUpdateMetastoreResponse from a dictionary.""" return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) @dataclass -class AccountsStorageCredentialInfo: +class AccountsUpdateStorageCredentialResponse: + """The storage credential was successfully updated.""" + credential_info: Optional[StorageCredentialInfo] = None def as_dict(self) -> dict: - """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the AccountsUpdateStorageCredentialResponse into a 
dictionary suitable for use as a JSON request body.""" body = {} if self.credential_info: body["credential_info"] = self.credential_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the AccountsUpdateStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.credential_info: body["credential_info"] = self.credential_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: - """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredentialResponse: + """Deserializes the AccountsUpdateStorageCredentialResponse from a dictionary.""" return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) @@ -807,12 +1040,18 @@ class CatalogInfo: connection_name: Optional[str] = None """The name of the connection to an external data source.""" + conversion_info: Optional[ConversionInfo] = None + """Status of conversion of FOREIGN catalog to UC Native catalog.""" + created_at: Optional[int] = None """Time at which this catalog was created, in epoch milliseconds.""" created_by: Optional[str] = None """Username of catalog creator.""" + dr_replication_info: Optional[DrReplicationInfo] = None + """Disaster Recovery replication state snapshot.""" + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None @@ -874,10 +1113,14 @@ def as_dict(self) -> dict: body["comment"] = self.comment if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.conversion_info: + body["conversion_info"] = self.conversion_info.as_dict() if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.dr_replication_info: + body["dr_replication_info"] = self.dr_replication_info.as_dict() if self.effective_predictive_optimization_flag: body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() if self.enable_predictive_optimization is not None: @@ -925,10 +1168,14 @@ def as_shallow_dict(self) -> dict: body["comment"] = self.comment if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.conversion_info: + body["conversion_info"] = self.conversion_info if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.dr_replication_info: + body["dr_replication_info"] = self.dr_replication_info if self.effective_predictive_optimization_flag: body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag if self.enable_predictive_optimization is not None: @@ -973,8 +1220,10 @@ def from_dict(cls, d: Dict[str, Any]) -> CatalogInfo: catalog_type=_enum(d, "catalog_type", CatalogType), comment=d.get("comment", None), connection_name=d.get("connection_name", None), + conversion_info=_from_dict(d, "conversion_info", ConversionInfo), created_at=d.get("created_at", None), created_by=d.get("created_by", None), + dr_replication_info=_from_dict(d, "dr_replication_info", DrReplicationInfo), effective_predictive_optimization_flag=_from_dict( d, "effective_predictive_optimization_flag", 
EffectivePredictiveOptimizationFlag ), @@ -1207,6 +1456,55 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnMask: return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None)) +@dataclass +class ColumnMaskOptions: + function_name: str + """The fully qualified name of the column mask function. The function is called on each row of the + target table. The function's first argument and its return type should match the type of the + masked column. Required on create and update.""" + + on_column: str + """The alias of the column to be masked. The alias must refer to one of matched columns. The values + of the column is passed to the column mask function as the first argument. Required on create + and update.""" + + using: Optional[List[FunctionArgument]] = None + """Optional list of column aliases or constant literals to be passed as additional arguments to the + column mask function. The type of each column should match the positional argument of the column + mask function.""" + + def as_dict(self) -> dict: + """Serializes the ColumnMaskOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.on_column is not None: + body["on_column"] = self.on_column + if self.using: + body["using"] = [v.as_dict() for v in self.using] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ColumnMaskOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.on_column is not None: + body["on_column"] = self.on_column + if self.using: + body["using"] = self.using + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ColumnMaskOptions: + """Deserializes the ColumnMaskOptions from a dictionary.""" + return cls( + function_name=d.get("function_name", None), + on_column=d.get("on_column", None), + using=_repeated_dict(d, "using", FunctionArgument), + ) + + @dataclass class ColumnRelationship: source: Optional[str] = None @@ -1464,7 +1762,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 37""" + """Next Id: 38""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -1474,6 +1772,7 @@ class ConnectionType(Enum): HTTP = "HTTP" MYSQL = "MYSQL" ORACLE = "ORACLE" + PALANTIR = "PALANTIR" POSTGRESQL = "POSTGRESQL" POWER_BI = "POWER_BI" REDSHIFT = "REDSHIFT" @@ -1536,6 +1835,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: ) +@dataclass +class ConversionInfo: + """Status of conversion of FOREIGN entity into UC Native entity.""" + + state: Optional[ConversionInfoState] = None + """The conversion state of the resource.""" + + def as_dict(self) -> dict: + """Serializes the ConversionInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConversionInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConversionInfo: + """Deserializes the ConversionInfo from a dictionary.""" + return cls(state=_enum(d, "state", ConversionInfoState)) + + +class ConversionInfoState(Enum): + + COMPLETED = "COMPLETED" + IN_PROGRESS = "IN_PROGRESS" + + @dataclass 
class CreateAccessRequest: behalf_of: Optional[Principal] = None @@ -1624,29 +1956,156 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAccessRequestResponse: @dataclass -class CreateFunction: +class CreateAccountsMetastore: name: str - """Name of function, relative to parent schema.""" - - catalog_name: str - """Name of parent catalog.""" - - schema_name: str - """Name of parent schema relative to its parent catalog.""" + """The user-specified name of the metastore.""" - input_params: FunctionParameterInfos + region: Optional[str] = None + """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - data_type: ColumnTypeName - """Scalar function return data type.""" + storage_root: Optional[str] = None + """The storage root URL for metastore""" - full_data_type: str - """Pretty printed function data type.""" + def as_dict(self) -> dict: + """Serializes the CreateAccountsMetastore into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + return body - routine_body: CreateFunctionRoutineBody - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccountsMetastore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccountsMetastore: + """Deserializes the CreateAccountsMetastore from a dictionary.""" + return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) + + +@dataclass +class CreateAccountsStorageCredential: + name: str + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the CreateAccountsStorageCredential into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.name is not None: + body["name"] = self.name + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateAccountsStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.name is not None: + body["name"] = self.name + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateAccountsStorageCredential: + """Deserializes the CreateAccountsStorageCredential from a dictionary.""" + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + name=d.get("name", None), + read_only=d.get("read_only", None), + ) + + +@dataclass +class CreateFunction: + name: str + """Name of function, relative to parent schema.""" + + catalog_name: str + """Name of parent Catalog.""" + + schema_name: str + """Name of parent Schema relative to its parent Catalog.""" + + input_params: FunctionParameterInfos + """Function input parameters.""" + + data_type: ColumnTypeName + """Scalar function return data type.""" + + full_data_type: str + """Pretty printed function data type.""" + + routine_body: CreateFunctionRoutineBody + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the **external_language** field, and the **return_params** of the function cannot + be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be + **NO_SQL**.""" routine_definition: str """Function body.""" @@ -1685,7 +2144,7 @@ class CreateFunction: """Table function return parameters.""" routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + """function dependencies.""" sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" @@ -1813,74 +2272,28 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: class CreateFunctionParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" S = "S" class CreateFunctionRoutineBody(Enum): - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" EXTERNAL = "EXTERNAL" SQL = "SQL" class CreateFunctionSecurityType(Enum): - """The security type of the function.""" DEFINER = "DEFINER" class CreateFunctionSqlDataAccess(Enum): - """Function SQL data access.""" CONTAINS_SQL = "CONTAINS_SQL" NO_SQL = "NO_SQL" READS_SQL_DATA = "READS_SQL_DATA" -@dataclass -class CreateMetastore: - name: str - """The user-specified name of the metastore.""" - - region: Optional[str] = None - """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - - storage_root: Optional[str] = None - """The storage root URL for metastore""" - - def as_dict(self) -> dict: - """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: - """Deserializes the CreateMetastore from a dictionary.""" - return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) - - @dataclass class CreateMetastoreAssignment: metastore_id: str @@ -1984,119 +2397,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRequestExternalLineage: ) -@dataclass -class CreateResponse: - def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" - return cls() - - -@dataclass -class CreateStorageCredential: - name: str - """The credential name. 
The name must be unique among storage and service credentials within the - metastore.""" - - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. Only applicable when purpose is - **STORAGE**.""" - - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the created credential.""" - - def as_dict(self) -> dict: - """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: - """Deserializes the CreateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", 
DatabricksGcpServiceAccountRequest - ), - name=d.get("name", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) - - @dataclass class CredentialDependency: """A credential that is dependent on a SQL object.""" @@ -2535,9 +2835,27 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteMonitorResponse: @dataclass -class DeleteRequestExternalLineage: - source: ExternalLineageObject - """Source object of the external lineage relationship.""" +class DeletePolicyResponse: + def as_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: + """Deserializes the DeletePolicyResponse from a dictionary.""" + return cls() + + +@dataclass +class DeleteRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" target: ExternalLineageObject """Target object of the external lineage relationship.""" @@ -2751,6 +3069,38 @@ def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: return cls() +@dataclass +class DrReplicationInfo: + """Metadata related to Disaster Recovery.""" + + status: Optional[DrReplicationStatus] = None + + def as_dict(self) -> dict: + """Serializes the DrReplicationInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DrReplicationInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DrReplicationInfo: + """Deserializes the DrReplicationInfo from a dictionary.""" + return cls(status=_enum(d, "status", DrReplicationStatus)) + + +class DrReplicationStatus(Enum): + + DR_REPLICATION_STATUS_PRIMARY = "DR_REPLICATION_STATUS_PRIMARY" + DR_REPLICATION_STATUS_SECONDARY = "DR_REPLICATION_STATUS_SECONDARY" + + @dataclass class EffectivePermissionsList: next_page_token: Optional[str] = None @@ -2833,8 +3183,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): - """The type of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" CATALOG = "CATALOG" SCHEMA = "SCHEMA" @@ -2974,16 +3322,15 @@ class EntityTagAssignment: """Represents a tag assignment to an entity""" entity_name: str - """Required. The fully qualified structured name of the entity to which the tag is assigned. The - entity name should follow the format of: entity_type/fully_qualified_entity_name. eg. - catalogs/my_catalog, schemas/my_catalog.my_schema, - columns/my_catalog.my_schema.my_table.my_column. When containing segments with special - characters (e.g. '/'), the whole segment must be wrapped with backticks. For example, - columns/catalog.schema.table.\`column/a\`""" + """The fully qualified name of the entity to which the tag is assigned""" tag_key: str """The key of the tag""" + entity_type: str + """The type of the entity to which the tag is assigned. 
Allowed values are: catalogs, schemas, + tables, columns, volumes.""" + tag_value: Optional[str] = None """The value of the tag""" @@ -2992,6 +3339,8 @@ def as_dict(self) -> dict: body = {} if self.entity_name is not None: body["entity_name"] = self.entity_name + if self.entity_type is not None: + body["entity_type"] = self.entity_type if self.tag_key is not None: body["tag_key"] = self.tag_key if self.tag_value is not None: @@ -3003,6 +3352,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.entity_name is not None: body["entity_name"] = self.entity_name + if self.entity_type is not None: + body["entity_type"] = self.entity_type if self.tag_key is not None: body["tag_key"] = self.tag_key if self.tag_value is not None: @@ -3013,7 +3364,10 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EntityTagAssignment: """Deserializes the EntityTagAssignment from a dictionary.""" return cls( - entity_name=d.get("entity_name", None), tag_key=d.get("tag_key", None), tag_value=d.get("tag_value", None) + entity_name=d.get("entity_name", None), + entity_type=d.get("entity_type", None), + tag_key=d.get("tag_key", None), + tag_value=d.get("tag_value", None), ) @@ -3619,7 +3973,8 @@ class ExternalLocationInfo: sufficient.""" file_event_queue: Optional[FileEventQueue] = None - """File event queue settings.""" + """File event queue settings. If `enable_file_events` is `true`, must be defined and have exactly + one of the documented properties.""" isolation_mode: Optional[IsolationMode] = None @@ -4043,6 +4398,38 @@ def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: ) +@dataclass +class FunctionArgument: + alias: Optional[str] = None + """The alias of a matched column.""" + + constant: Optional[str] = None + """A constant literal.""" + + def as_dict(self) -> dict: + """Serializes the FunctionArgument into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.constant is not None: + body["constant"] = self.constant + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FunctionArgument into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.constant is not None: + body["constant"] = self.constant + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FunctionArgument: + """Deserializes the FunctionArgument from a dictionary.""" + return cls(alias=d.get("alias", None), constant=d.get("constant", None)) + + @dataclass class FunctionDependency: """A function that is dependent on a SQL object.""" @@ -4078,7 +4465,7 @@ class FunctionInfo: through the BROWSE privilege when include_browse is enabled in the request.""" catalog_name: Optional[str] = None - """Name of parent catalog.""" + """Name of parent Catalog.""" comment: Optional[str] = None """User-provided free-form text description.""" @@ -4102,12 +4489,13 @@ class FunctionInfo: """Pretty printed function data type.""" full_name: Optional[str] = None - """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" + """Full name of Function, in form of **catalog_name**.**schema_name**.**function_name**""" function_id: Optional[str] = None """Id of Function, relative to parent schema.""" input_params: Optional[FunctionParameterInfos] = None + """Function input parameters.""" is_deterministic: Optional[bool] = None """Whether the function is deterministic.""" @@ -4122,7 +4510,7 @@ class 
FunctionInfo: """Name of function, relative to parent schema.""" owner: Optional[str] = None - """Username of current owner of function.""" + """Username of current owner of the function.""" parameter_style: Optional[FunctionInfoParameterStyle] = None """Function parameter style. **S** is the value for SQL.""" @@ -4135,18 +4523,18 @@ class FunctionInfo: routine_body: Optional[FunctionInfoRoutineBody] = None """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + specified in the **external_language** field, and the **return_params** of the function cannot + be used (as **TABLE** return type is not supported), and the **sql_data_access** field must be **NO_SQL**.""" routine_definition: Optional[str] = None """Function body.""" routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + """function dependencies.""" schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" + """Name of parent Schema relative to its parent Catalog.""" security_type: Optional[FunctionInfoSecurityType] = None """Function security type.""" @@ -4161,10 +4549,10 @@ class FunctionInfo: """List of schemes whose objects can be referenced without qualification.""" updated_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" + """Time at which this function was last modified, in epoch milliseconds.""" updated_by: Optional[str] = None - """Username of user who last modified function.""" + """Username of user who last modified the function.""" def as_dict(self) -> dict: """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" @@ -4334,29 +4722,22 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: class FunctionInfoParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" S = "S" class FunctionInfoRoutineBody(Enum): - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" EXTERNAL = "EXTERNAL" SQL = "SQL" class FunctionInfoSecurityType(Enum): - """The security type of the function.""" DEFINER = "DEFINER" class FunctionInfoSqlDataAccess(Enum): - """Function SQL data access.""" CONTAINS_SQL = "CONTAINS_SQL" NO_SQL = "NO_SQL" @@ -4366,12 +4747,13 @@ class FunctionInfoSqlDataAccess(Enum): @dataclass class FunctionParameterInfo: name: str - """Name of parameter.""" + """Name of Parameter.""" type_text: str """Full data type spec, SQL/catalogString text.""" type_name: ColumnTypeName + """Name of type (INT, STRUCT, MAP, etc.)""" position: int """Ordinal position of column (starting at position 0).""" @@ -4383,8 +4765,10 @@ class FunctionParameterInfo: """Default value of the parameter.""" parameter_mode: Optional[FunctionParameterMode] = None + """Function parameter mode.""" parameter_type: Optional[FunctionParameterType] = None + """Function parameter type.""" type_interval_type: Optional[str] = None """Format of IntervalType.""" @@ -4478,7 +4862,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: @dataclass class FunctionParameterInfos: parameters: Optional[List[FunctionParameterInfo]] = None - """The array of __FunctionParameterInfo__ definitions of the function's parameters.""" def as_dict(self) -> dict: """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" @@ -4501,13 +4884,11 @@ def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: class FunctionParameterMode(Enum): - """The mode of the function parameter.""" IN = "IN" class FunctionParameterType(Enum): - """The type of function parameter.""" COLUMN = "COLUMN" PARAM = "PARAM" @@ -4575,6 +4956,77 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: ) +@dataclass +class GenerateTemporaryPathCredentialResponse: + aws_temp_credentials: Optional[AwsCredentials] = None + + azure_aad: Optional[AzureActiveDirectoryToken] = None + + azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None + + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. 
The API client is advised to + cache the credential given this expiration time.""" + + gcp_oauth_token: Optional[GcpOauthToken] = None + + r2_temp_credentials: Optional[R2Credentials] = None + + url: Optional[str] = None + """The URL of the storage path accessible by the temporary credential.""" + + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryPathCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryPathCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryPathCredentialResponse: + """Deserializes the GenerateTemporaryPathCredentialResponse from a dictionary.""" + return cls( + aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), + azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), + r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), + url=d.get("url", None), + ) + + @dataclass class GenerateTemporaryServiceCredentialAzureOptions: """The Azure cloud options to customize the requested temporary credential""" @@ -5017,7 +5469,7 @@ class LineageDirection(Enum): @dataclass class ListAccountMetastoreAssignmentsResponse: - """The list of workspaces to which the given metastore is assigned.""" + """The metastore assignments were successfully returned.""" workspace_ids: Optional[List[int]] = None @@ -5043,6 +5495,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse @dataclass class ListAccountStorageCredentialsResponse: + """The metastore storage credentials were successfully returned.""" + storage_credentials: Optional[List[StorageCredentialInfo]] = None """An array of metastore storage credentials.""" @@ -5410,6 +5864,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: ) +@dataclass +class ListPoliciesResponse: + next_page_token: Optional[str] = None + """Optional opaque token for continuing pagination. 
`page_token` should be set to this value for + the next request to retrieve the next page of results.""" + + policies: Optional[List[PolicyInfo]] = None + """The list of retrieved policies.""" + + def as_dict(self) -> dict: + """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse: + """Deserializes the ListPoliciesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", PolicyInfo)) + + @dataclass class ListQuotasResponse: next_page_token: Optional[str] = None @@ -5680,6 +6167,38 @@ def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) +@dataclass +class MatchColumn: + alias: Optional[str] = None + """Optional alias of the matched column.""" + + condition: Optional[str] = None + """The condition expression used to match a table column.""" + + def as_dict(self) -> dict: + """Serializes the MatchColumn into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.condition is not None: + body["condition"] = self.condition + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MatchColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.condition is not None: + body["condition"] = self.condition + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MatchColumn: + """Deserializes the MatchColumn from a dictionary.""" + return cls(alias=d.get("alias", None), condition=d.get("condition", None)) + + class MatchType(Enum): """The artifact pattern matching type""" @@ -5695,7 +6214,8 @@ class MetastoreAssignment: """The unique ID of the metastore.""" default_catalog_name: Optional[str] = None - """The name of the default catalog in the metastore.""" + """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" def as_dict(self) -> dict: """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" @@ -5912,10 +6432,6 @@ class ModelVersionInfo: aliases: Optional[List[RegisteredModelAlias]] = None """List of aliases associated with the model version""" - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - catalog_name: Optional[str] = None """The name of the catalog containing the model version""" @@ -5974,8 +6490,6 @@ def as_dict(self) -> dict: body = {} if self.aliases: body["aliases"] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: - body["browse_only"] = self.browse_only if self.catalog_name is not None: body["catalog_name"] = self.catalog_name if self.comment is not None: @@ -6017,8 +6531,6 @@ def as_shallow_dict(self) -> dict: body = {} if self.aliases: body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only if self.catalog_name is not None: body["catalog_name"] = self.catalog_name if self.comment is not None: @@ -6060,7 +6572,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" return cls( aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), catalog_name=d.get("catalog_name", None), comment=d.get("comment", None), created_at=d.get("created_at", None), @@ -6082,11 +6593,9 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: class ModelVersionInfoStatus(Enum): - """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - status, then move to READY status once the model version files are uploaded and the model - version is finalized. Only model versions in READY status can be loaded for inference or served.""" FAILED_REGISTRATION = "FAILED_REGISTRATION" + MODEL_VERSION_STATUS_UNKNOWN = "MODEL_VERSION_STATUS_UNKNOWN" PENDING_REGISTRATION = "PENDING_REGISTRATION" READY = "READY" @@ -7251,6 +7760,13 @@ class OptionSpecOptionType(Enum): OPTION_STRING = "OPTION_STRING" +class PathOperation(Enum): + + PATH_CREATE_TABLE = "PATH_CREATE_TABLE" + PATH_READ = "PATH_READ" + PATH_READ_WRITE = "PATH_READ_WRITE" + + @dataclass class PermissionsChange: add: Optional[List[Privilege]] = None @@ -7372,51 +7888,223 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: @dataclass -class PrimaryKeyConstraint: - name: str - """The name of the constraint.""" +class PolicyInfo: + to_principals: List[str] + """List of user or group names that the policy applies to. Required on create and optional on + update.""" - child_columns: List[str] - """Column names for this constraint.""" + for_securable_type: SecurableType + """Type of securables that the policy should take effect on. Only `TABLE` is supported at this + moment. Required on create and optional on update.""" - rely: Optional[bool] = None - """True if the constraint is RELY, false or unset if NORELY.""" + policy_type: PolicyType + """Type of the policy. Required on create and ignored on update.""" - timeseries_columns: Optional[List[str]] = None - """Column names that represent a timeseries.""" + column_mask: Optional[ColumnMaskOptions] = None + """Options for column mask policies. 
Valid only if `policy_type` is `POLICY_TYPE_COLUMN_MASK`. + Required on create and optional on update. When specified on update, the new options will + replace the existing options as a whole.""" + + comment: Optional[str] = None + """Optional description of the policy.""" + + created_at: Optional[int] = None + """Time at which the policy was created, in epoch milliseconds. Output only.""" + + created_by: Optional[str] = None + """Username of the user who created the policy. Output only.""" + + except_principals: Optional[List[str]] = None + """Optional list of user or group names that should be excluded from the policy.""" + + id: Optional[str] = None + """Unique identifier of the policy. This field is output only and is generated by the system.""" + + match_columns: Optional[List[MatchColumn]] = None + """Optional list of condition expressions used to match table columns. Only valid when + `for_securable_type` is `TABLE`. When specified, the policy only applies to tables whose columns + satisfy all match conditions.""" + + name: Optional[str] = None + """Name of the policy. Required on create and optional on update. To rename the policy, set `name` + to a different value on update.""" + + on_securable_fullname: Optional[str] = None + """Full name of the securable on which the policy is defined. Required on create and ignored on + update.""" + + on_securable_type: Optional[SecurableType] = None + """Type of the securable on which the policy is defined. Only `CATALOG`, `SCHEMA` and `TABLE` are + supported at this moment. Required on create and ignored on update.""" + + row_filter: Optional[RowFilterOptions] = None + """Options for row filter policies. Valid only if `policy_type` is `POLICY_TYPE_ROW_FILTER`. + Required on create and optional on update. When specified on update, the new options will + replace the existing options as a whole.""" + + updated_at: Optional[int] = None + """Time at which the policy was last modified, in epoch milliseconds. Output only.""" + + updated_by: Optional[str] = None + """Username of the user who last modified the policy. 
Output only.""" + + when_condition: Optional[str] = None + """Optional condition when the policy should take effect.""" def as_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" + """Serializes the PolicyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] + if self.column_mask: + body["column_mask"] = self.column_mask.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.except_principals: + body["except_principals"] = [v for v in self.except_principals] + if self.for_securable_type is not None: + body["for_securable_type"] = self.for_securable_type.value + if self.id is not None: + body["id"] = self.id + if self.match_columns: + body["match_columns"] = [v.as_dict() for v in self.match_columns] if self.name is not None: body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = [v for v in self.timeseries_columns] + if self.on_securable_fullname is not None: + body["on_securable_fullname"] = self.on_securable_fullname + if self.on_securable_type is not None: + body["on_securable_type"] = self.on_securable_type.value + if self.policy_type is not None: + body["policy_type"] = self.policy_type.value + if self.row_filter: + body["row_filter"] = self.row_filter.as_dict() + if self.to_principals: + body["to_principals"] = [v for v in self.to_principals] + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.when_condition is not None: + body["when_condition"] = self.when_condition return body def as_shallow_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" + """Serializes the PolicyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: - body["child_columns"] = self.child_columns + if self.column_mask: + body["column_mask"] = self.column_mask + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.except_principals: + body["except_principals"] = self.except_principals + if self.for_securable_type is not None: + body["for_securable_type"] = self.for_securable_type + if self.id is not None: + body["id"] = self.id + if self.match_columns: + body["match_columns"] = self.match_columns if self.name is not None: body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = self.timeseries_columns + if self.on_securable_fullname is not None: + body["on_securable_fullname"] = self.on_securable_fullname + if self.on_securable_type is not None: + body["on_securable_type"] = self.on_securable_type + if self.policy_type is not None: + body["policy_type"] = self.policy_type + if self.row_filter: + body["row_filter"] = self.row_filter + if self.to_principals: + body["to_principals"] = self.to_principals + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = 
self.updated_by + if self.when_condition is not None: + body["when_condition"] = self.when_condition return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: - """Deserializes the PrimaryKeyConstraint from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: + """Deserializes the PolicyInfo from a dictionary.""" return cls( - child_columns=d.get("child_columns", None), - name=d.get("name", None), + column_mask=_from_dict(d, "column_mask", ColumnMaskOptions), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + except_principals=d.get("except_principals", None), + for_securable_type=_enum(d, "for_securable_type", SecurableType), + id=d.get("id", None), + match_columns=_repeated_dict(d, "match_columns", MatchColumn), + name=d.get("name", None), + on_securable_fullname=d.get("on_securable_fullname", None), + on_securable_type=_enum(d, "on_securable_type", SecurableType), + policy_type=_enum(d, "policy_type", PolicyType), + row_filter=_from_dict(d, "row_filter", RowFilterOptions), + to_principals=d.get("to_principals", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + when_condition=d.get("when_condition", None), + ) + + +class PolicyType(Enum): + + POLICY_TYPE_COLUMN_MASK = "POLICY_TYPE_COLUMN_MASK" + POLICY_TYPE_ROW_FILTER = "POLICY_TYPE_ROW_FILTER" + + +@dataclass +class PrimaryKeyConstraint: + name: str + """The name of the constraint.""" + + child_columns: List[str] + """Column names for this constraint.""" + + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" + + timeseries_columns: Optional[List[str]] = None + """Column names that represent a timeseries.""" + + def as_dict(self) -> dict: + """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + body["timeseries_columns"] = [v for v in self.timeseries_columns] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" + body = {} + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + body["timeseries_columns"] = self.timeseries_columns + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: + """Deserializes the PrimaryKeyConstraint from a dictionary.""" + return cls( + child_columns=d.get("child_columns", None), + name=d.get("name", None), rely=d.get("rely", None), timeseries_columns=d.get("timeseries_columns", None), ) @@ -7490,6 +8178,7 @@ class Privilege(Enum): CREATE_VOLUME = "CREATE_VOLUME" EXECUTE = "EXECUTE" EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" + EXTERNAL_USE_SCHEMA = "EXTERNAL_USE_SCHEMA" MANAGE = "MANAGE" MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" MODIFY = "MODIFY" @@ -7770,11 +8459,21 @@ def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: @dataclass class RegisteredModelAlias: - """Registered model alias.""" - alias_name: Optional[str] = None """Name of the alias, e.g. 
'champion' or 'latest_stable'""" + catalog_name: Optional[str] = None + """The name of the catalog containing the model version""" + + id: Optional[str] = None + """The unique identifier of the alias""" + + model_name: Optional[str] = None + """The name of the parent registered model of the model version, relative to parent schema""" + + schema_name: Optional[str] = None + """The name of the schema containing the model version, relative to parent catalog""" + version_num: Optional[int] = None """Integer version number of the model version to which this alias points.""" @@ -7783,6 +8482,14 @@ def as_dict(self) -> dict: body = {} if self.alias_name is not None: body["alias_name"] = self.alias_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.id is not None: + body["id"] = self.id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name if self.version_num is not None: body["version_num"] = self.version_num return body @@ -7792,6 +8499,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.alias_name is not None: body["alias_name"] = self.alias_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.id is not None: + body["id"] = self.id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name if self.version_num is not None: body["version_num"] = self.version_num return body @@ -7799,7 +8514,14 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) + return cls( + alias_name=d.get("alias_name", None), + catalog_name=d.get("catalog_name", None), + id=d.get("id", None), + model_name=d.get("model_name", None), + schema_name=d.get("schema_name", None), + version_num=d.get("version_num", None), + ) @dataclass @@ -7934,6 +8656,42 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: ) +@dataclass +class RowFilterOptions: + function_name: str + """The fully qualified name of the row filter function. The function is called on each row of the + target table. It should return a boolean value indicating whether the row should be visible to + the user. Required on create and update.""" + + using: Optional[List[FunctionArgument]] = None + """Optional list of column aliases or constant literals to be passed as arguments to the row filter + function. 
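
As a quick sketch of the RowFilterOptions shape being defined here: build the options for a hypothetical boolean UDF and serialize them for a request body. The function name `main.filters.region_is_visible` is a placeholder, not part of this patch.

    from databricks.sdk.service.catalog import RowFilterOptions

    # Placeholder UDF; any Unity Catalog function returning BOOLEAN fits here.
    options = RowFilterOptions(function_name="main.filters.region_is_visible")
    print(options.as_dict())  # {'function_name': 'main.filters.region_is_visible'}
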
The type of each column should match the positional argument of the row filter + function.""" + + def as_dict(self) -> dict: + """Serializes the RowFilterOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.using: + body["using"] = [v.as_dict() for v in self.using] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RowFilterOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.using: + body["using"] = self.using + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RowFilterOptions: + """Deserializes the RowFilterOptions from a dictionary.""" + return cls(function_name=d.get("function_name", None), using=_repeated_dict(d, "using", FunctionArgument)) + + @dataclass class SchemaInfo: """Next ID: 40""" @@ -8149,6 +8907,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable: class SecurableKind(Enum): + """Latest kind: CONNECTION_SHAREPOINT_OAUTH_M2M = 264; Next id:265""" TABLE_DB_STORAGE = "TABLE_DB_STORAGE" TABLE_DELTA = "TABLE_DELTA" @@ -8159,6 +8918,7 @@ class SecurableKind(Enum): TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED" TABLE_DELTA_UNIFORM_HUDI_EXTERNAL = "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL" TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL = "TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL" + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_DELTASHARING = "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_DELTASHARING" TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL = ( "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL" ) @@ -8189,6 +8949,7 @@ class SecurableKind(Enum): TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" + TABLE_FOREIGN_PALANTIR = "TABLE_FOREIGN_PALANTIR" TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" @@ -8206,6 +8967,7 @@ class SecurableKind(Enum): TABLE_MATERIALIZED_VIEW = "TABLE_MATERIALIZED_VIEW" TABLE_MATERIALIZED_VIEW_DELTASHARING = "TABLE_MATERIALIZED_VIEW_DELTASHARING" TABLE_METRIC_VIEW = "TABLE_METRIC_VIEW" + TABLE_METRIC_VIEW_DELTASHARING = "TABLE_METRIC_VIEW_DELTASHARING" TABLE_ONLINE_VECTOR_INDEX_DIRECT = "TABLE_ONLINE_VECTOR_INDEX_DIRECT" TABLE_ONLINE_VECTOR_INDEX_REPLICA = "TABLE_ONLINE_VECTOR_INDEX_REPLICA" TABLE_ONLINE_VIEW = "TABLE_ONLINE_VIEW" @@ -8562,7 +9324,7 @@ class SystemSchemaInfo: state: str """The current state of enablement for the system schema. An empty string means the system schema is available and ready for opt-in. 
Possible values: AVAILABLE | ENABLE_INITIALIZED | - ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" + ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE | MANAGED""" def as_dict(self) -> dict: """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" @@ -8609,6 +9371,7 @@ class SystemType(Enum): SAP = "SAP" SERVICENOW = "SERVICENOW" SNOWFLAKE = "SNOWFLAKE" + STREAM_NATIVE = "STREAM_NATIVE" TABLEAU = "TABLEAU" TERADATA = "TERADATA" WORKDAY = "WORKDAY" @@ -9240,50 +10003,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: @dataclass -class UpdateAssignmentResponse: - def as_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: - """Deserializes the UpdateAssignmentResponse from a dictionary.""" - return cls() - - -@dataclass -class UpdateCatalogWorkspaceBindingsResponse: - workspaces: Optional[List[int]] = None - """A list of workspace IDs""" - - def as_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.workspaces: - body["workspaces"] = self.workspaces - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: - """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) - - -@dataclass -class UpdateMetastore: +class UpdateAccountsMetastore: delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" @@ -9294,12 +10014,6 @@ class UpdateMetastore: delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - id: Optional[str] = None - """Unique ID of the metastore.""" - - new_name: Optional[str] = None - """New name for the metastore.""" - owner: Optional[str] = None """The owner of the metastore.""" @@ -9310,7 +10024,7 @@ class UpdateMetastore: """UUID of storage credential to access the metastore storage_root.""" def as_dict(self) -> dict: - """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateAccountsMetastore into a dictionary suitable for use as a JSON request body.""" body = {} if self.delta_sharing_organization_name is not None: body["delta_sharing_organization_name"] = self.delta_sharing_organization_name @@ -9320,10 +10034,6 @@ def as_dict(self) -> dict: ) if self.delta_sharing_scope is not None: body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.privilege_model_version is not None: @@ -9333,7 +10043,7 @@ def as_dict(self) -> dict: return body def 
as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateAccountsMetastore into a shallow dictionary of its immediate attributes.""" body = {} if self.delta_sharing_organization_name is not None: body["delta_sharing_organization_name"] = self.delta_sharing_organization_name @@ -9343,10 +10053,6 @@ def as_shallow_dict(self) -> dict: ) if self.delta_sharing_scope is not None: body["delta_sharing_scope"] = self.delta_sharing_scope - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.privilege_model_version is not None: @@ -9356,22 +10062,157 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: - """Deserializes the UpdateMetastore from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountsMetastore: + """Deserializes the UpdateAccountsMetastore from a dictionary.""" return cls( delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), delta_sharing_recipient_token_lifetime_in_seconds=d.get( "delta_sharing_recipient_token_lifetime_in_seconds", None ), delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), - id=d.get("id", None), - new_name=d.get("new_name", None), owner=d.get("owner", None), privilege_model_version=d.get("privilege_model_version", None), storage_root_credential_id=d.get("storage_root_credential_id", None), ) +@dataclass +class UpdateAccountsStorageCredential: + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
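
For reference, a minimal sketch of building the `UpdateAccountsMetastore` payload defined above. It assumes `DeltaSharingScopeEnum` exposes an `INTERNAL_AND_EXTERNAL` member, as in current SDK releases; the owner name is a placeholder.

    from databricks.sdk.service.catalog import (DeltaSharingScopeEnum,
                                                UpdateAccountsMetastore)

    payload = UpdateAccountsMetastore(
        owner="data-platform-admins",  # placeholder principal
        delta_sharing_scope=DeltaSharingScopeEnum.INTERNAL_AND_EXTERNAL,  # member name assumed
    )
    print(payload.as_dict())  # only the fields that were set are serialized
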
Only applicable when purpose is + **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAccountsStorageCredential into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAccountsStorageCredential into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountsStorageCredential: + """Deserializes the UpdateAccountsStorageCredential from a dictionary.""" + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + ) + + +@dataclass +class UpdateAssignmentResponse: + def as_dict(self) -> dict: + """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: + """Deserializes the UpdateAssignmentResponse from a dictionary.""" + return cls() + + +@dataclass +class UpdateCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" + + def as_dict(self) -> dict: + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a 
dictionary suitable for use as a JSON request body.""" + body = {} + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspaces: + body["workspaces"] = self.workspaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: + """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) + + @dataclass class UpdateMetastoreAssignment: default_catalog_name: Optional[str] = None @@ -9496,152 +10337,26 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateRequestExternalLineage: id=d.get("id", None), properties=d.get("properties", None), source=_from_dict(d, "source", ExternalLineageObject), - target=_from_dict(d, "target", ExternalLineageObject), - ) - - -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - -@dataclass -class UpdateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" - - force: Optional[bool] = None - """Force update even if there are dependent external locations or external tables.""" - - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - name: Optional[str] = None - """Name of the storage credential.""" - - new_name: Optional[str] = None - """New name for the storage credential.""" - - owner: Optional[str] = None - """Username of current owner of credential.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" - - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the updated credential.""" - - def as_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + target=_from_dict(d, "target", ExternalLineageObject), + ) + + +@dataclass +class UpdateResponse: + def as_dict(self) -> dict: + """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: - """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", 
DatabricksGcpServiceAccountRequest - ), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: + """Deserializes the UpdateResponse from a dictionary.""" + return cls() @dataclass @@ -9855,6 +10570,11 @@ class VolumeInfo: """The unique identifier of the volume""" volume_type: Optional[VolumeType] = None + """The type of the volume. An external volume is located in the specified external location. A + managed volume is located in the default location which is specified by the parent schema, or + the parent catalog, or the Metastore. [Learn more] + + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" def as_dict(self) -> dict: """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body.""" @@ -9959,11 +10679,6 @@ def from_dict(cls, d: Dict[str, Any]) -> VolumeInfo: class VolumeType(Enum): - """The type of the volume. An external volume is located in the specified external location. A - managed volume is located in the default location which is specified by the parent schema, or - the parent catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" EXTERNAL = "EXTERNAL" MANAGED = "MANAGED" @@ -10019,7 +10734,7 @@ def __init__(self, api_client): def create( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None - ): + ) -> AccountsCreateMetastoreAssignmentResponse: """Creates an assignment to a metastore for a workspace :param workspace_id: int @@ -10028,7 +10743,7 @@ def create( Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - + :returns: :class:`AccountsCreateMetastoreAssignmentResponse` """ body = {} if metastore_assignment is not None: @@ -10038,14 +10753,15 @@ def create( "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "POST", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", body=body, headers=headers, ) + return AccountsCreateMetastoreAssignmentResponse.from_dict(res) - def delete(self, workspace_id: int, metastore_id: str): + def delete(self, workspace_id: int, metastore_id: str) -> AccountsDeleteMetastoreAssignmentResponse: """Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int @@ -10053,23 +10769,24 @@ def delete(self, workspace_id: int, metastore_id: str): :param metastore_id: str Unity Catalog metastore ID - + :returns: :class:`AccountsDeleteMetastoreAssignmentResponse` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", headers=headers, ) + return AccountsDeleteMetastoreAssignmentResponse.from_dict(res) def get(self, workspace_id: int) -> AccountsMetastoreAssignment: """Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned - a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment - will not be found and a 404 returned. + a metastore, the mapping will be returned. 
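
A minimal usage sketch for the assignment accessors above, assuming account credentials resolve from the environment and that the client exposes this service as `metastore_assignments`, as in current releases; the workspace ID is a placeholder.

    from databricks.sdk import AccountClient

    a = AccountClient()  # account host/credentials from the environment
    # Raises a 404-mapped error if the workspace has no metastore assigned.
    assignment = a.metastore_assignments.get(workspace_id=1234567890)  # placeholder ID
    print(assignment)
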
If no metastore is assigned to the workspace, the + assignment will not be found and a 404 returned. :param workspace_id: int Workspace ID. @@ -10107,7 +10824,7 @@ def list(self, metastore_id: str) -> Iterator[int]: def update( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[UpdateMetastoreAssignment] = None - ): + ) -> AccountsUpdateMetastoreAssignmentResponse: """Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. @@ -10117,7 +10834,7 @@ def update( Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - + :returns: :class:`AccountsUpdateMetastoreAssignmentResponse` """ body = {} if metastore_assignment is not None: @@ -10127,12 +10844,13 @@ def update( "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", body=body, headers=headers, ) + return AccountsUpdateMetastoreAssignmentResponse.from_dict(res) class AccountMetastoresAPI: @@ -10142,12 +10860,12 @@ class AccountMetastoresAPI: def __init__(self, api_client): self._api = api_client - def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: + def create(self, *, metastore_info: Optional[CreateAccountsMetastore] = None) -> AccountsCreateMetastoreResponse: """Creates a Unity Catalog metastore. - :param metastore_info: :class:`CreateMetastore` (optional) + :param metastore_info: :class:`CreateAccountsMetastore` (optional) - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsCreateMetastoreResponse` """ body = {} if metastore_info is not None: @@ -10158,9 +10876,9 @@ def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> Account } res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/metastores", body=body, headers=headers) - return AccountsMetastoreInfo.from_dict(res) + return AccountsCreateMetastoreResponse.from_dict(res) - def delete(self, metastore_id: str, *, force: Optional[bool] = None): + def delete(self, metastore_id: str, *, force: Optional[bool] = None) -> AccountsDeleteMetastoreResponse: """Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str @@ -10168,7 +10886,7 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None): :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - + :returns: :class:`AccountsDeleteMetastoreResponse` """ query = {} @@ -10178,20 +10896,21 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None): "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", query=query, headers=headers, ) + return AccountsDeleteMetastoreResponse.from_dict(res) - def get(self, metastore_id: str) -> AccountsMetastoreInfo: + def get(self, metastore_id: str) -> AccountsGetMetastoreResponse: """Gets a Unity Catalog metastore from an account, both specified by ID. 
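
A short sketch of iterating the account's metastores with the list API below, again assuming an environment-configured `AccountClient`:

    from databricks.sdk import AccountClient

    a = AccountClient()
    for metastore in a.metastores.list():  # yields MetastoreInfo values
        print(metastore.metastore_id, metastore.name)
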
:param metastore_id: str Unity Catalog metastore ID - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsGetMetastoreResponse` """ headers = { @@ -10201,7 +10920,7 @@ def get(self, metastore_id: str) -> AccountsMetastoreInfo: res = self._api.do( "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", headers=headers ) - return AccountsMetastoreInfo.from_dict(res) + return AccountsGetMetastoreResponse.from_dict(res) def list(self) -> Iterator[MetastoreInfo]: """Gets all Unity Catalog metastores associated with an account specified by ID. @@ -10215,17 +10934,20 @@ def list(self) -> Iterator[MetastoreInfo]: } json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/metastores", headers=headers) - parsed = ListMetastoresResponse.from_dict(json).metastores + parsed = AccountsListMetastoresResponse.from_dict(json).metastores return parsed if parsed is not None else [] - def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo: + def update( + self, metastore_id: str, *, metastore_info: Optional[UpdateAccountsMetastore] = None + ) -> AccountsUpdateMetastoreResponse: """Updates an existing Unity Catalog metastore. :param metastore_id: str Unity Catalog metastore ID - :param metastore_info: :class:`UpdateMetastore` (optional) + :param metastore_info: :class:`UpdateAccountsMetastore` (optional) + Properties of the metastore to change. - :returns: :class:`AccountsMetastoreInfo` + :returns: :class:`AccountsUpdateMetastoreResponse` """ body = {} if metastore_info is not None: @@ -10238,7 +10960,7 @@ def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", body=body, headers=headers ) - return AccountsMetastoreInfo.from_dict(res) + return AccountsUpdateMetastoreResponse.from_dict(res) class AccountStorageCredentialsAPI: @@ -10248,25 +10970,33 @@ def __init__(self, api_client): self._api = api_client def create( - self, metastore_id: str, *, credential_info: Optional[CreateStorageCredential] = None - ) -> AccountsStorageCredentialInfo: - """Creates a new storage credential. The request object is specific to the cloud: - - * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * - **GcpServiceAcountKey** for GCP credentials. + self, + metastore_id: str, + *, + credential_info: Optional[CreateAccountsStorageCredential] = None, + skip_validation: Optional[bool] = None, + ) -> AccountsCreateStorageCredentialInfo: + """Creates a new storage credential. The request object is specific to the cloud: - **AwsIamRole** for + AWS credentials - **AzureServicePrincipal** for Azure credentials - **GcpServiceAccountKey** for GCP + credentials - The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the + The caller must be a metastore admin and have the `CREATE_STORAGE_CREDENTIAL` privilege on the metastore. :param metastore_id: str Unity Catalog metastore ID - :param credential_info: :class:`CreateStorageCredential` (optional) + :param credential_info: :class:`CreateAccountsStorageCredential` (optional) + :param skip_validation: bool (optional) + Optional, default false. Supplying true to this argument skips validation of the created set of + credentials. 
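
A hedged sketch of the create call documented here. It assumes `CreateAccountsStorageCredential` carries a `name` plus the same cloud-specific fields as the update variant shown later in this file, and that the account client exposes the service as `storage_credentials`; the metastore ID and role ARN are placeholders.

    from databricks.sdk import AccountClient
    from databricks.sdk.service.catalog import (AwsIamRoleRequest,
                                                CreateAccountsStorageCredential)

    a = AccountClient()
    created = a.storage_credentials.create(
        metastore_id="11111111-2222-3333-4444-555555555555",  # placeholder
        credential_info=CreateAccountsStorageCredential(  # field set assumed, see note above
            name="finance-role",
            aws_iam_role=AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/finance"),
        ),
        skip_validation=False,
    )
    print(created)
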
- :returns: :class:`AccountsStorageCredentialInfo` + :returns: :class:`AccountsCreateStorageCredentialInfo` """ body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() + if skip_validation is not None: + body["skip_validation"] = skip_validation headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -10278,9 +11008,11 @@ def create( body=body, headers=headers, ) - return AccountsStorageCredentialInfo.from_dict(res) + return AccountsCreateStorageCredentialInfo.from_dict(res) - def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None): + def delete( + self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None + ) -> AccountsDeleteStorageCredentialResponse: """Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. @@ -10291,7 +11023,7 @@ def delete(self, metastore_id: str, storage_credential_name: str, *, force: Opti :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false. - + :returns: :class:`AccountsDeleteStorageCredentialResponse` """ query = {} @@ -10301,12 +11033,13 @@ def delete(self, metastore_id: str, storage_credential_name: str, *, force: Opti "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", query=query, headers=headers, ) + return AccountsDeleteStorageCredentialResponse.from_dict(res) def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo: """Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the @@ -10315,7 +11048,7 @@ def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorag :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str - Name of the storage credential. + Required. Name of the storage credential. :returns: :class:`AccountsStorageCredentialInfo` """ @@ -10357,22 +11090,27 @@ def update( metastore_id: str, storage_credential_name: str, *, - credential_info: Optional[UpdateStorageCredential] = None, - ) -> AccountsStorageCredentialInfo: + credential_info: Optional[UpdateAccountsStorageCredential] = None, + skip_validation: Optional[bool] = None, + ) -> AccountsUpdateStorageCredentialResponse: """Updates a storage credential on the metastore. The caller must be the owner of the storage credential. - If the caller is a metastore admin, only the __owner__ credential can be changed. + If the caller is a metastore admin, only the **owner** credential can be changed. :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - :param credential_info: :class:`UpdateStorageCredential` (optional) + :param credential_info: :class:`UpdateAccountsStorageCredential` (optional) + :param skip_validation: bool (optional) + Optional. Supplying true to this argument skips validation of the updated set of credentials. 
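
And a matching sketch for the update call below, rotating ownership without re-validating the credential; identifiers are placeholders and the `storage_credentials` attribute name is assumed as above.

    from databricks.sdk import AccountClient
    from databricks.sdk.service.catalog import UpdateAccountsStorageCredential

    a = AccountClient()
    a.storage_credentials.update(
        metastore_id="11111111-2222-3333-4444-555555555555",  # placeholder
        storage_credential_name="finance-role",
        credential_info=UpdateAccountsStorageCredential(owner="data-platform-admins"),
        skip_validation=True,  # nothing cloud-facing changed, so skip re-validation
    )
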
- :returns: :class:`AccountsStorageCredentialInfo` + :returns: :class:`AccountsUpdateStorageCredentialResponse` """ body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() + if skip_validation is not None: + body["skip_validation"] = skip_validation headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -10384,7 +11122,7 @@ def update( body=body, headers=headers, ) - return AccountsStorageCredentialInfo.from_dict(res) + return AccountsUpdateStorageCredentialResponse.from_dict(res) class ArtifactAllowlistsAPI: @@ -10474,6 +11212,8 @@ def create( *, comment: Optional[str] = None, connection_name: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, options: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, provider_name: Optional[str] = None, @@ -10489,6 +11229,10 @@ def create( User-provided free-form text description. :param connection_name: str (optional) The name of the connection to an external data source. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param options: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param properties: Dict[str,str] (optional) @@ -10509,6 +11253,10 @@ def create( body["comment"] = comment if connection_name is not None: body["connection_name"] = connection_name + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if name is not None: body["name"] = name if options is not None: @@ -10627,6 +11375,8 @@ def update( name: str, *, comment: Optional[str] = None, + conversion_info: Optional[ConversionInfo] = None, + dr_replication_info: Optional[DrReplicationInfo] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, @@ -10641,6 +11391,10 @@ def update( The name of the catalog. :param comment: str (optional) User-provided free-form text description. + :param conversion_info: :class:`ConversionInfo` (optional) + Status of conversion of FOREIGN catalog to UC Native catalog. + :param dr_replication_info: :class:`DrReplicationInfo` (optional) + Disaster Recovery replication state snapshot. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) Whether predictive optimization should be enabled for this object and objects under it. 
:param isolation_mode: :class:`CatalogIsolationMode` (optional) @@ -10659,6 +11413,10 @@ def update( body = {} if comment is not None: body["comment"] = comment + if conversion_info is not None: + body["conversion_info"] = conversion_info.as_dict() + if dr_replication_info is not None: + body["dr_replication_info"] = dr_replication_info.as_dict() if enable_predictive_optimization is not None: body["enable_predictive_optimization"] = enable_predictive_optimization.value if isolation_mode is not None: @@ -11204,13 +11962,25 @@ class EntityTagAssignmentsAPI: - """Entity Tag Assignments provide a unified interface for managing tag assignments on Unity Catalog entities.""" + """Tags are attributes that include keys and optional values that you can use to organize and categorize + entities in Unity Catalog. Entity tagging is currently supported on catalogs, schemas, tables (including + views), columns, and volumes. With these APIs, users can create, update, delete, and list tag assignments + across Unity Catalog entities.""" def __init__(self, api_client): self._api = api_client def create(self, tag_assignment: EntityTagAssignment) -> EntityTagAssignment: - """Create an tag assignment for an Unity Catalog entity. + """Creates a tag assignment for a Unity Catalog entity. + + To add tags to Unity Catalog entities, you must own the entity or have the following privileges: - + **APPLY TAG** on the entity - **USE SCHEMA** on the entity's parent schema - **USE CATALOG** on the + entity's parent catalog + + To add a governed tag to Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE** + permission on the tag policy. See [Manage tag policy permissions]. + + [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions :param tag_assignment: :class:`EntityTagAssignment` :returns: :class:`EntityTagAssignment` """ body = tag_assignment.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } res = self._api.do("POST", "/api/2.1/unity-catalog/entity-tag-assignments", body=body, headers=headers) return EntityTagAssignment.from_dict(res) - def delete(self, entity_name: str, tag_key: str): - """Delete a tag assignment for an Unity Catalog entity. + def delete(self, entity_type: str, entity_name: str, tag_key: str): + """Deletes a tag assignment for a Unity Catalog entity by its key. + + To delete tags from Unity Catalog entities, you must own the entity or have the following privileges: + - **APPLY TAG** on the entity - **USE_SCHEMA** on the entity's parent schema - **USE_CATALOG** on the + entity's parent catalog + To delete a governed tag from Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE** + permission on the tag policy. See [Manage tag policy permissions]. + + [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions + + :param entity_type: str + The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, + columns, volumes. :param entity_name: str - Required. The fully qualified structured name of the entity to which the tag is assigned. The entity - name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog, - schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing - segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks.
For - example, columns/catalog.schema.table.\`column/a\` + The fully qualified name of the entity to which the tag is assigned :param tag_key: str Required. The key of the tag to delete """ headers = { "Accept": "application/json", } self._api.do( - "DELETE", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", headers=headers + "DELETE", + f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}", + headers=headers, ) - def get(self, entity_name: str, tag_key: str) -> EntityTagAssignment: - """Get a tag assignment for an Unity Catalog entity. + def get(self, entity_type: str, entity_name: str, tag_key: str) -> EntityTagAssignment: + """Gets a tag assignment for a Unity Catalog entity by tag key. + :param entity_type: str + The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, + columns, volumes. :param entity_name: str - Required. The fully qualified structured name of the entity to which the tag is assigned. The entity - name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog, - schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing - segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For - example, columns/catalog.schema.table.\`column/a\` + The fully qualified name of the entity to which the tag is assigned :param tag_key: str Required. The key of the tag :returns: :class:`EntityTagAssignment` """ headers = { "Accept": "application/json", } res = self._api.do( - "GET", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", headers=headers + "GET", + f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}", + headers=headers, ) return EntityTagAssignment.from_dict(res) def list( - self, entity_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + self, entity_type: str, entity_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[EntityTagAssignment]: """List tag assignments for a Unity Catalog entity + :param entity_type: str + The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, + columns, volumes. :param entity_name: str - Required. The fully qualified structured name of the entity to which the tag is assigned. The entity - name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog, - schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing - segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For - example, columns/catalog.schema.table.\`column/a\` + The fully qualified name of the entity to which the tag is assigned :param max_results: int (optional) Optional.
Maximum number of tag assignments to return in a single page :param page_token: str (optional) query = {} if max_results is not None: query["max_results"] = max_results if page_token is not None: query["page_token"] = page_token headers = { "Accept": "application/json", } while True: json = self._api.do( - "GET", f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags", query=query, headers=headers + "GET", + f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags", + query=query, + headers=headers, ) if "tag_assignments" in json: for v in json["tag_assignments"]: yield EntityTagAssignment.from_dict(v) if "next_page_token" not in json or not json["next_page_token"]: return query["page_token"] = json["next_page_token"] def update( - self, entity_name: str, tag_key: str, tag_assignment: EntityTagAssignment, update_mask: str + self, entity_type: str, entity_name: str, tag_key: str, tag_assignment: EntityTagAssignment, update_mask: str ) -> EntityTagAssignment: - """Update a tag assignment for an Unity Catalog entity + """Updates an existing tag assignment for a Unity Catalog entity. + + To update tags on Unity Catalog entities, you must own the entity or have the following privileges: - + **APPLY TAG** on the entity - **USE SCHEMA** on the entity's parent schema - **USE CATALOG** on the + entity's parent catalog + + To update a governed tag on Unity Catalog entities, you must also have the **ASSIGN** or **MANAGE** + permission on the tag policy. See [Manage tag policy permissions]. + + [Manage tag policy permissions]: https://docs.databricks.com/aws/en/admin/tag-policies/manage-permissions + :param entity_type: str + The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, + columns, volumes. :param entity_name: str - Required. The fully qualified structured name of the entity to which the tag is assigned. The entity - name should follow the format of: entity_type/fully_qualified_entity_name. eg. catalogs/my_catalog, - schemas/my_catalog.my_schema, columns/my_catalog.my_schema.my_table.my_column. When containing - segments with special characters (e.g. '/'), the whole segment must be wrapped with backticks. For - example, columns/catalog.schema.table.\`column/a\` + The fully qualified name of the entity to which the tag is assigned :param tag_key: str The key of the tag :param tag_assignment: :class:`EntityTagAssignment` :param update_mask: str res = self._api.do( "PATCH", - f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_name}/tags/{tag_key}", + f"/api/2.1/unity-catalog/entity-tag-assignments/{entity_type}/{entity_name}/tags/{tag_key}", query=query, body=body, headers=headers, ) @@ -11537,7 +12328,8 @@ def create( enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - File event queue settings. + File event queue settings. If `enable_file_events` is `true`, must be defined and have exactly one + of the documented properties. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -11699,7 +12491,8 @@ def update( enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - File event queue settings. + File event queue settings. If `enable_file_events` is `true`, must be defined and have exactly one + of the documented properties. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts.
:param isolation_mode: :class:`IsolationMode` (optional) @@ -11930,7 +12723,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): :param name: str The fully-qualified name of the function (of the form - __catalog_name__.__schema_name__.__function__name__). + __catalog_name__.__schema_name__.__function__name__) . :param force: bool (optional) Force deletion even if the function is notempty. @@ -11940,9 +12733,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): query = {} if force is not None: query["force"] = force - headers = { - "Accept": "application/json", - } + headers = {} self._api.do("DELETE", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) @@ -12043,7 +12834,7 @@ def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) - Username of current owner of function. + Username of current owner of the function. :returns: :class:`FunctionInfo` """ @@ -12658,7 +13449,29 @@ def list( return query["page_token"] = json["next_page_token"] - def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo: + def update( + self, + full_name: str, + version: int, + *, + aliases: Optional[List[RegisteredModelAlias]] = None, + catalog_name: Optional[str] = None, + comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + id: Optional[str] = None, + metastore_id: Optional[str] = None, + model_name: Optional[str] = None, + model_version_dependencies: Optional[DependencyList] = None, + run_id: Optional[str] = None, + run_workspace_id: Optional[int] = None, + schema_name: Optional[str] = None, + source: Optional[str] = None, + status: Optional[ModelVersionInfoStatus] = None, + storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, + ) -> ModelVersionInfo: """Updates the specified model version. The caller must be a metastore admin or an owner of the parent registered model. 
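
A minimal sketch of the expanded model-version update call; in practice most callers still send only `comment`, and the model name below is a placeholder.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    mv = w.model_versions.update(
        full_name="main.default.churn_model",  # placeholder three-level name
        version=1,
        comment="validated against the holdout set",
    )
    print(mv.status)
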
For the latter case, @@ -12671,14 +13484,80 @@ def update(self, full_name: str, version: int, *, comment: Optional[str] = None) The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the model version + :param catalog_name: str (optional) + The name of the catalog containing the model version :param comment: str (optional) The comment attached to the model version + :param created_at: int (optional) + :param created_by: str (optional) + The identifier of the user who created the model version + :param id: str (optional) + The unique identifier of the model version + :param metastore_id: str (optional) + The unique identifier of the metastore containing the model version + :param model_name: str (optional) + The name of the parent registered model of the model version, relative to parent schema + :param model_version_dependencies: :class:`DependencyList` (optional) + Model version dependencies, for feature-store packaged models + :param run_id: str (optional) + MLflow run ID used when creating the model version, if ``source`` was generated by an experiment run + stored in an MLflow tracking server + :param run_workspace_id: int (optional) + ID of the Databricks workspace containing the MLflow run that generated this model version, if + applicable + :param schema_name: str (optional) + The name of the schema containing the model version, relative to parent catalog + :param source: str (optional) + URI indicating the location of the source artifacts (files) for the model version + :param status: :class:`ModelVersionInfoStatus` (optional) + Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model version is + finalized. Only model versions in READY status can be loaded for inference or served. 
+ :param storage_location: str (optional) + The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + :param updated_by: str (optional) + The identifier of the user who updated the model version last time :returns: :class:`ModelVersionInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if catalog_name is not None: + body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if id is not None: + body["id"] = id + if metastore_id is not None: + body["metastore_id"] = metastore_id + if model_name is not None: + body["model_name"] = model_name + if model_version_dependencies is not None: + body["model_version_dependencies"] = model_version_dependencies.as_dict() + if run_id is not None: + body["run_id"] = run_id + if run_workspace_id is not None: + body["run_workspace_id"] = run_workspace_id + if schema_name is not None: + body["schema_name"] = schema_name + if source is not None: + body["source"] = source + if status is not None: + body["status"] = status.value + if storage_location is not None: + body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -12783,6 +13662,185 @@ def get(self, name: str) -> OnlineTable: return OnlineTable.from_dict(res) +class PoliciesAPI: + """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies + in Unity Catalog. With ABAC policies, access is controlled in a hierarchical and scalable manner, based on + data attributes rather than specific resources, enabling more flexible and comprehensive access control. + ABAC policies in Unity Catalog support conditions on securable properties, governance tags, and + environment contexts. Callers must have the `MANAGE` privilege on a securable to view, create, update, or + delete ABAC policies.""" + + def __init__(self, api_client): + self._api = api_client + + def create_policy(self, policy_info: PolicyInfo) -> PolicyInfo: + """Creates a new policy on a securable. The new policy applies to the securable and all its descendants. + + :param policy_info: :class:`PolicyInfo` + Required. The policy to create. + + :returns: :class:`PolicyInfo` + """ + body = policy_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/policies", body=body, headers=headers) + return PolicyInfo.from_dict(res) + + def delete_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> DeletePolicyResponse: + """Delete an ABAC policy defined on a securable. + + :param on_securable_type: str + Required. The type of the securable to delete the policy from. + :param on_securable_fullname: str + Required. The fully qualified name of the securable to delete the policy from. + :param name: str + Required. 
The name of the policy to delete + + :returns: :class:`DeletePolicyResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + headers=headers, + ) + return DeletePolicyResponse.from_dict(res) + + def get_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> PolicyInfo: + """Get the policy definition on a securable + + :param on_securable_type: str + Required. The type of the securable to retrieve the policy for. + :param on_securable_fullname: str + Required. The fully qualified name of securable to retrieve policy for. + :param name: str + Required. The name of the policy to retrieve. + + :returns: :class:`PolicyInfo` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + headers=headers, + ) + return PolicyInfo.from_dict(res) + + def list_policies( + self, + on_securable_type: str, + on_securable_fullname: str, + *, + include_inherited: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[PolicyInfo]: + """List all policies defined on a securable. Optionally, the list can include inherited policies defined + on the securable's parent schema or catalog. + + :param on_securable_type: str + Required. The type of the securable to list policies for. + :param on_securable_fullname: str + Required. The fully qualified name of securable to list policies for. + :param include_inherited: bool (optional) + Optional. Whether to include policies defined on parent securables. By default, the inherited + policies are not included. + :param max_results: int (optional) + Optional. Maximum number of policies to return on a single page (page length). - When not set or set + to 0, the page length is set to a server configured value (recommended); - When set to a value + greater than 0, the page length is the minimum of this value and a server configured value; + :param page_token: str (optional) + Optional. Opaque pagination token to go to next page based on previous query. + + :returns: Iterator over :class:`PolicyInfo` + """ + + query = {} + if include_inherited is not None: + query["include_inherited"] = include_inherited + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}", + query=query, + headers=headers, + ) + if "policies" in json: + for v in json["policies"]: + yield PolicyInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_policy( + self, + on_securable_type: str, + on_securable_fullname: str, + name: str, + policy_info: PolicyInfo, + *, + update_mask: Optional[str] = None, + ) -> PolicyInfo: + """Update an ABAC policy on a securable. + + :param on_securable_type: str + Required. The type of the securable to update the policy for. + :param on_securable_fullname: str + Required. The fully qualified name of the securable to update the policy for. + :param name: str + Required. The name of the policy to update. + :param policy_info: :class:`PolicyInfo` + Optional fields to update. 
This is the request body for updating a policy. Use `update_mask` field + to specify which fields in the request is to be updated. - If `update_mask` is empty or "*", all + specified fields will be updated. - If `update_mask` is specified, only the fields specified in the + `update_mask` will be updated. If a field is specified in `update_mask` and not set in the request, + the field will be cleared. Users can use the update mask to explicitly unset optional fields such as + `exception_principals` and `when_condition`. + :param update_mask: str (optional) + Optional. The update mask field for specifying user intentions on which fields to update in the + request. + + :returns: :class:`PolicyInfo` + """ + body = policy_info.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + query=query, + body=body, + headers=headers, + ) + return PolicyInfo.from_dict(res) + + class QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. Most write operations @@ -13207,20 +14265,29 @@ class RegisteredModelsAPI: new model version, or update permissions on the registered model, users must be owners of the registered model. - Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that - specify a securable type, use "FUNCTION" as the securable type.""" + Note: The securable type for models is FUNCTION. When using REST APIs (e.g. tagging, grants) that specify + a securable type, use FUNCTION as the securable type.""" def __init__(self, api_client): self._api = api_client def create( self, - catalog_name: str, - schema_name: str, - name: str, *, + aliases: Optional[List[RegisteredModelAlias]] = None, + browse_only: Optional[bool] = None, + catalog_name: Optional[str] = None, comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + full_name: Optional[str] = None, + metastore_id: Optional[str] = None, + name: Optional[str] = None, + owner: Optional[str] = None, + schema_name: Optional[str] = None, storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, ) -> RegisteredModelInfo: """Creates a new registered model in Unity Catalog. @@ -13232,30 +14299,67 @@ def create( **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - :param catalog_name: str + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the registered model + :param browse_only: bool (optional) + Indicates whether the principal is limited to retrieving metadata for the associated object through + the BROWSE privilege when include_browse is enabled in the request. 
+ :param catalog_name: str (optional) The name of the catalog where the schema and the registered model reside - :param schema_name: str + :param comment: str (optional) + The comment attached to the registered model + :param created_at: int (optional) + Creation timestamp of the registered model in milliseconds since the Unix epoch + :param created_by: str (optional) + The identifier of the user who created the registered model + :param full_name: str (optional) + The three-level (fully qualified) name of the registered model + :param metastore_id: str (optional) + The unique identifier of the metastore + :param name: str (optional) + The name of the registered model + :param owner: str (optional) + The identifier of the user who owns the registered model + :param schema_name: str (optional) The name of the schema where the registered model resides - :param name: str - The name of the registered model - :param comment: str (optional) - The comment attached to the registered model :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + Last-update timestamp of the registered model in milliseconds since the Unix epoch + :param updated_by: str (optional) + The identifier of the user who updated the registered model last time :returns: :class:`RegisteredModelInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if browse_only is not None: + body["browse_only"] = browse_only if catalog_name is not None: body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if full_name is not None: + body["full_name"] = full_name + if metastore_id is not None: + body["metastore_id"] = metastore_id if name is not None: body["name"] = name + if owner is not None: + body["owner"] = owner if schema_name is not None: body["schema_name"] = schema_name if storage_location is not None: body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -13413,7 +14517,7 @@ def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredM **USE_SCHEMA** privilege on the parent schema. :param full_name: str - Full name of the registered model + The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param version_num: int @@ -13438,9 +14542,20 @@ def update( self, full_name: str, *, + aliases: Optional[List[RegisteredModelAlias]] = None, + browse_only: Optional[bool] = None, + catalog_name: Optional[str] = None, comment: Optional[str] = None, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + metastore_id: Optional[str] = None, + name: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, + schema_name: Optional[str] = None, + storage_location: Optional[str] = None, + updated_at: Optional[int] = None, + updated_by: Optional[str] = None, ) -> RegisteredModelInfo: """Updates the specified registered model. 
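As a usage sketch for the regenerated create/set_alias surface above (names are illustrative; this assumes the standard `w.registered_models` accessor and a workspace client configured through the default auth chain):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The generated signature now takes keyword-only, optional fields, but the
# service still expects catalog, schema, and model names on create.
model = w.registered_models.create(
    catalog_name="main",       # illustrative three-level location
    schema_name="ml_models",
    name="churn_model",
    comment="Churn prediction model",
)

# Point the "champion" alias at version 1 of the new model.
w.registered_models.set_alias(
    full_name=model.full_name,  # e.g. "main.ml_models.churn_model"
    alias="champion",
    version_num=1,
)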
@@ -13452,22 +14567,67 @@ def update( :param full_name: str The three-level (fully qualified) name of the registered model + :param aliases: List[:class:`RegisteredModelAlias`] (optional) + List of aliases associated with the registered model + :param browse_only: bool (optional) + Indicates whether the principal is limited to retrieving metadata for the associated object through + the BROWSE privilege when include_browse is enabled in the request. + :param catalog_name: str (optional) + The name of the catalog where the schema and the registered model reside :param comment: str (optional) The comment attached to the registered model + :param created_at: int (optional) + Creation timestamp of the registered model in milliseconds since the Unix epoch + :param created_by: str (optional) + The identifier of the user who created the registered model + :param metastore_id: str (optional) + The unique identifier of the metastore + :param name: str (optional) + The name of the registered model :param new_name: str (optional) New name for the registered model. :param owner: str (optional) The identifier of the user who owns the registered model + :param schema_name: str (optional) + The name of the schema where the registered model resides + :param storage_location: str (optional) + The storage location on the cloud under which model version data files are stored + :param updated_at: int (optional) + Last-update timestamp of the registered model in milliseconds since the Unix epoch + :param updated_by: str (optional) + The identifier of the user who updated the registered model last time :returns: :class:`RegisteredModelInfo` """ body = {} + if aliases is not None: + body["aliases"] = [v.as_dict() for v in aliases] + if browse_only is not None: + body["browse_only"] = browse_only + if catalog_name is not None: + body["catalog_name"] = catalog_name if comment is not None: body["comment"] = comment + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if metastore_id is not None: + body["metastore_id"] = metastore_id + if name is not None: + body["name"] = name if new_name is not None: body["new_name"] = new_name if owner is not None: body["owner"] = owner + if schema_name is not None: + body["schema_name"] = schema_name + if storage_location is not None: + body["storage_location"] = storage_location + if updated_at is not None: + body["updated_at"] = updated_at + if updated_by is not None: + body["updated_by"] = updated_by headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -13477,7 +14637,80 @@ def update( return RegisteredModelInfo.from_dict(res) -class RequestForAccessAPI: +class ResourceQuotasAPI: + """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that + can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per + metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and + limits. For more information on resource quotas see the [Unity Catalog documentation]. 
+ + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + """ + + def __init__(self, api_client): + self._api = api_client + + def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: + """The GetQuota API returns usage information for a single resource quota, defined as a child-parent + pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered + asynchronously. The updated count might not be returned in the first call. + + :param parent_securable_type: str + Securable type of the quota parent. + :param parent_full_name: str + Full name of the parent resource. Provide the metastore ID if the parent is a metastore. + :param quota_name: str + Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. + + :returns: :class:`GetQuotaResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}", + headers=headers, + ) + return GetQuotaResponse.from_dict(res) + + def list_quotas( + self, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[QuotaInfo]: + """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the + counts returned. This API does not trigger a refresh of quota counts. + + :param max_results: int (optional) + The number of quotas to return. + :param page_token: str (optional) + Opaque token for the next page of results. + + :returns: Iterator over :class:`QuotaInfo` + """ + + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers + ) + if "quotas" in json: + for v in json["quotas"]: + yield QuotaInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + +class RfaAPI: """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables. @@ -13583,79 +14816,6 @@ def update_access_request_destinations( return AccessRequestDestinations.from_dict(res) -class ResourceQuotasAPI: - """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that - can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per - metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and - limits. For more information on resource quotas see the [Unity Catalog documentation]. - - [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas - """ - - def __init__(self, api_client): - self._api = api_client - - def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: - """The GetQuota API returns usage information for a single resource quota, defined as a child-parent - pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered - asynchronously. The updated count might not be returned in the first call. 
- - :param parent_securable_type: str - Securable type of the quota parent. - :param parent_full_name: str - Full name of the parent resource. Provide the metastore ID if the parent is a metastore. - :param quota_name: str - Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. - - :returns: :class:`GetQuotaResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}", - headers=headers, - ) - return GetQuotaResponse.from_dict(res) - - def list_quotas( - self, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[QuotaInfo]: - """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the - counts returned. This API does not trigger a refresh of quota counts. - - :param max_results: int (optional) - The number of quotas to return. - :param page_token: str (optional) - Opaque token for the next page of results. - - :returns: Iterator over :class:`QuotaInfo` - """ - - query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do( - "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers - ) - if "quotas" in json: - for v in json["quotas"]: - yield QuotaInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - class SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. To access (or list) a table or view in a schema, users must have @@ -13674,7 +14834,7 @@ def create( properties: Optional[Dict[str, str]] = None, storage_root: Optional[str] = None, ) -> SchemaInfo: - """Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the + """Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. :param name: str @@ -14356,6 +15516,79 @@ class TablesAPI: def __init__(self, api_client): self._api = api_client + def create( + self, + name: str, + catalog_name: str, + schema_name: str, + table_type: TableType, + data_source_format: DataSourceFormat, + storage_location: str, + *, + columns: Optional[List[ColumnInfo]] = None, + properties: Optional[Dict[str, str]] = None, + ) -> TableInfo: + """Creates a new table in the specified catalog and schema. + + To create an external delta table, the caller must have the **EXTERNAL_USE_SCHEMA** privilege on the + parent schema and the **EXTERNAL_USE_LOCATION** privilege on the external location. These privileges + must always be granted explicitly, and cannot be inherited through ownership or **ALL_PRIVILEGES**. + + Standard UC permissions needed to create tables still apply: **USE_CATALOG** on the parent catalog (or + ownership of the parent catalog), **CREATE_TABLE** and **USE_SCHEMA** on the parent schema (or + ownership of the parent schema), and **CREATE_EXTERNAL_TABLE** on external location. + + The **columns** field needs to be in a Spark compatible format, so we recommend you use Spark to + create these tables. The API itself does not validate the correctness of the column spec. 
If the spec + is not Spark compatible, the tables may not be readable by Databricks Runtime. + + NOTE: The Create Table API for external clients only supports creating **external delta tables**. The + values shown in the respective enums are all values supported by Databricks, however for this specific + Create Table API, only **table_type** **EXTERNAL** and **data_source_format** **DELTA** are supported. + Additionally, column masks are not supported when creating tables through this API. + + :param name: str + Name of table, relative to parent schema. + :param catalog_name: str + Name of parent catalog. + :param schema_name: str + Name of parent schema relative to its parent catalog. + :param table_type: :class:`TableType` + :param data_source_format: :class:`DataSourceFormat` + :param storage_location: str + Storage root URL for table (for **MANAGED**, **EXTERNAL** tables). + :param columns: List[:class:`ColumnInfo`] (optional) + The array of __ColumnInfo__ definitions of the table's columns. + :param properties: Dict[str,str] (optional) + A map of key-value properties attached to the securable. + + :returns: :class:`TableInfo` + """ + body = {} + if catalog_name is not None: + body["catalog_name"] = catalog_name + if columns is not None: + body["columns"] = [v.as_dict() for v in columns] + if data_source_format is not None: + body["data_source_format"] = data_source_format.value + if name is not None: + body["name"] = name + if properties is not None: + body["properties"] = properties + if schema_name is not None: + body["schema_name"] = schema_name + if storage_location is not None: + body["storage_location"] = storage_location + if table_type is not None: + body["table_type"] = table_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/tables", body=body, headers=headers) + return TableInfo.from_dict(res) + def delete(self, full_name: str): """Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the @@ -14377,10 +15610,10 @@ def delete(self, full_name: str): def exists(self, full_name: str) -> TableExistsResponse: """Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the - owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the + owner of the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, - and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on - the parent catalog * Have BROWSE privilege on the parent schema. + and either be the table owner or have the **SELECT** privilege on the table. * Have **BROWSE** + privilege on the parent catalog * Have **BROWSE** privilege on the parent schema :param full_name: str Full name of the table. @@ -14405,9 +15638,9 @@ def get( ) -> TableInfo: """Gets a table from the metastore for a specific catalog and schema. 
The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of - the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** - privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be - the table owner or have the SELECT privilege on the table. + the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, + and either be the table owner or have the **SELECT** privilege on the table. :param full_name: str Full name of the table. @@ -14605,19 +15838,86 @@ def update(self, full_name: str, *, owner: Optional[str] = None): self._api.do("PATCH", f"/api/2.1/unity-catalog/tables/{full_name}", body=body, headers=headers) +class TemporaryPathCredentialsAPI: + """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud + storage locations registered in Databricks. These credentials are employed to provide secure and + time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud + provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service + (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud + supports temporary credentials through OAuth 2.0. + + Temporary path credentials ensure that data access is limited in scope and duration, reducing the risk of + unauthorized access or misuse. To use the temporary path credentials API, a metastore admin needs to + enable the external_access_enabled flag (off by default) at the metastore level. A user needs to be + granted the EXTERNAL USE LOCATION permission by external location owner. For requests on existing external + tables, user also needs to be granted the EXTERNAL USE SCHEMA permission at the schema level by catalog + admin. + + Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog admin + explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for security reasons. + Similarly, EXTERNAL USE LOCATION is an external location level permission that can only be granted by + external location owner explicitly and is not included in external location ownership or ALL PRIVILEGES on + the external location for security reasons. + + This API only supports temporary path credentials for external locations and external tables, and volumes + will be supported in the future.""" + + def __init__(self, api_client): + self._api = api_client + + def generate_temporary_path_credentials( + self, url: str, operation: PathOperation, *, dry_run: Optional[bool] = None + ) -> GenerateTemporaryPathCredentialResponse: + """Get a short-lived credential for directly accessing cloud storage locations registered in Databricks. + The Generate Temporary Path Credentials API is only supported for external storage paths, specifically + external locations and external tables. Managed tables are not supported by this API. The metastore + must have **external_access_enabled** flag set to true (default false). The caller must have the + **EXTERNAL_USE_LOCATION** privilege on the external location; this privilege can only be granted by + external location owners. 
For requests on existing external tables, the caller must also have the + **EXTERNAL_USE_SCHEMA** privilege on the parent schema; this privilege can only be granted by catalog + owners. + + :param url: str + URL for path-based access. + :param operation: :class:`PathOperation` + The operation being performed on the path. + :param dry_run: bool (optional) + Optional. When set to true, the service will not validate that the generated credentials can perform + write operations, therefore no new paths will be created and the response will not contain valid + credentials. Defaults to false. + + :returns: :class:`GenerateTemporaryPathCredentialResponse` + """ + body = {} + if dry_run is not None: + body["dry_run"] = dry_run + if operation is not None: + body["operation"] = operation.value + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/unity-catalog/temporary-path-credentials", body=body, headers=headers) + return GenerateTemporaryPathCredentialResponse.from_dict(res) + + class TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage - locationswhere table data is stored in Databricks. These credentials are employed to provide secure and - time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider - has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS), - Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports - temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in - scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table - credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at - the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level - by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by - catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for - security reason.""" + locations where table data is stored in Databricks. These credentials are employed to provide secure and + time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud + provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service + (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud + supports temporary credentials through OAuth 2.0. + + Temporary table credentials ensure that data access is limited in scope and duration, reducing the risk of + unauthorized access or misuse. To use the temporary table credentials API, a metastore admin needs to + enable the external_access_enabled flag (off by default) at the metastore level, and user needs to be + granted the EXTERNAL USE SCHEMA permission at the schema level by catalog admin. 
Note that EXTERNAL USE + SCHEMA is a schema level permission that can only be granted by catalog admin explicitly and is not + included in schema ownership or ALL PRIVILEGES on the schema for security reasons.""" def __init__(self, api_client): self._api = api_client @@ -14626,9 +15926,9 @@ def generate_temporary_table_credentials( self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None ) -> GenerateTemporaryTableCredentialResponse: """Get a short-lived credential for directly accessing the table data on cloud storage. The metastore - must have external_access_enabled flag set to true (default false). The caller must have - EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog - owners. + must have **external_access_enabled** flag set to true (default false). The caller must have the + **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can only be granted by + catalog owners. :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is @@ -14696,6 +15996,11 @@ def create( :param name: str The name of the volume :param volume_type: :class:`VolumeType` + The type of the volume. An external volume is located in the specified external location. A managed + volume is located in the default location which is specified by the parent schema, or the parent + catalog, or the Metastore. [Learn more] + + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) @@ -14754,7 +16059,7 @@ def list( The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the - **READ VOLUME** privilege on the volume to recieve the volumes in the response. For the latter case, + **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 06ca9c2fe..57ea7e961 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -45,7 +45,7 @@ class CleanRoom: using the separate CreateCleanRoomOutputCatalog API.""" owner: Optional[str] = None - """This is Databricks username of the owner of the local clean room securable for permission + """This is the Databricks username of the owner of the local clean room securable for permission management.""" remote_detailed_info: Optional[CleanRoomRemoteDetail] = None @@ -142,7 +142,8 @@ class CleanRoomAsset: For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name.""" + For notebooks, the name is the notebook file name. 
For jar analyses, the name is the jar + analysis name.""" asset_type: CleanRoomAssetAssetType """The type of the asset.""" @@ -351,13 +352,13 @@ class CleanRoomAssetNotebook: """Server generated etag that represents the notebook version.""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """top-level status derived from all reviews""" + """Top-level status derived from all reviews""" reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing approvals or rejections""" runner_collaborator_aliases: Optional[List[str]] = None - """collaborators that can run the notebook""" + """Aliases of collaborators that can run the notebook.""" def as_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body.""" @@ -546,8 +547,12 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetVolumeLocalDetails: @dataclass class CleanRoomAutoApprovalRule: author_collaborator_alias: Optional[str] = None + """Collaborator alias of the author covered by the rule. Only one of `author_collaborator_alias` + and `author_scope` can be set.""" author_scope: Optional[CleanRoomAutoApprovalRuleAuthorScope] = None + """Scope of authors covered by the rule. Only one of `author_collaborator_alias` and `author_scope` + can be set.""" clean_room_name: Optional[str] = None """The name of the clean room this auto-approval rule belongs to.""" @@ -562,6 +567,7 @@ class CleanRoomAutoApprovalRule: """The owner of the rule to whom the rule applies.""" runner_collaborator_alias: Optional[str] = None + """Collaborator alias of the runner covered by the rule.""" def as_dict(self) -> dict: """Serializes the CleanRoomAutoApprovalRule into a dictionary suitable for use as a JSON request body.""" @@ -637,7 +643,7 @@ class CleanRoomCollaborator: It is not restricted to these values and could change in the future""" global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore id of the collaborator. The identifier is of format + """The global Unity Catalog metastore ID of the collaborator. 
The identifier is of format cloud:region:metastore-uuid.""" invite_recipient_email: Optional[str] = None @@ -704,19 +710,19 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomCollaborator: @dataclass class CleanRoomNotebookReview: comment: Optional[str] = None - """review comment""" + """Review comment""" created_at_millis: Optional[int] = None - """timestamp of when the review was submitted""" + """When the review was submitted, in epoch milliseconds""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """review outcome""" + """Review outcome""" review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None - """specified when the review was not explicitly made by a user""" + """Specified when the review was not explicitly made by a user""" reviewer_collaborator_alias: Optional[str] = None - """collaborator alias of the reviewer""" + """Collaborator alias of the reviewer""" def as_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body.""" @@ -1100,7 +1106,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: @dataclass class CreateCleanRoomAssetReviewResponse: notebook_review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """top-level status derived from all reviews""" + """Top-level status derived from all reviews""" notebook_reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing notebook approvals or rejections""" @@ -1348,13 +1354,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: @dataclass class NotebookVersionReview: etag: str - """etag that identifies the notebook version""" + """Etag identifying the notebook version""" review_state: CleanRoomNotebookReviewNotebookReviewState - """review outcome""" + """Review outcome""" comment: Optional[str] = None - """review comment""" + """Review comment""" def as_dict(self) -> dict: """Serializes the NotebookVersionReview into a dictionary suitable for use as a JSON request body.""" @@ -1503,17 +1509,18 @@ def create_clean_room_asset_review( clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, - notebook_review: NotebookVersionReview, + *, + notebook_review: Optional[NotebookVersionReview] = None, ) -> CreateCleanRoomAssetReviewResponse: - """submit an asset review + """Submit an asset review :param clean_room_name: str Name of the clean room :param asset_type: :class:`CleanRoomAssetAssetType` - can only be NOTEBOOK_FILE for now + Asset type. Can either be NOTEBOOK_FILE or JAR_ANALYSIS. :param name: str Name of the asset - :param notebook_review: :class:`NotebookVersionReview` + :param notebook_review: :class:`NotebookVersionReview` (optional) :returns: :class:`CreateCleanRoomAssetReviewResponse` """ @@ -1620,7 +1627,8 @@ def update( For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name. + For notebooks, the name is the notebook file name. For jar analyses, the name is the jar analysis + name. :param asset: :class:`CleanRoomAsset` The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to update. 
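A minimal sketch tying together the new catalog endpoints above, assuming the `w.tables` and `w.policies` accessors this patch wires into WorkspaceClient; the storage location is hypothetical and the "table" securable-type string is an assumption:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# This Create Table endpoint only supports external Delta tables, so
# table_type and data_source_format are pinned to EXTERNAL and DELTA.
table = w.tables.create(
    name="events",
    catalog_name="main",
    schema_name="analytics",
    table_type=catalog.TableType.EXTERNAL,
    data_source_format=catalog.DataSourceFormat.DELTA,
    storage_location="s3://example-bucket/tables/events",  # hypothetical path
)

# List ABAC policies on the new table, including any inherited from the
# parent schema and catalog; the iterator follows next_page_token itself.
for policy in w.policies.list_policies(
    on_securable_type="table",  # assumed securable-type string
    on_securable_fullname=table.full_name,
    include_inherited=True,
):
    print(policy.name)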
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 4d1ba3c0a..20a212cc8 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -325,6 +325,13 @@ class AzureAvailability(Enum): SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" +class BaseEnvironmentType(Enum): + """If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto""" + + CPU = "CPU" + GPU = "GPU" + + @dataclass class CancelResponse: def as_dict(self) -> dict: @@ -2722,6 +2729,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: class DefaultBaseEnvironment: base_environment_cache: Optional[List[DefaultBaseEnvironmentCache]] = None + base_environment_type: Optional[BaseEnvironmentType] = None + created_timestamp: Optional[int] = None creator_user_id: Optional[int] = None @@ -2753,6 +2762,8 @@ def as_dict(self) -> dict: body = {} if self.base_environment_cache: body["base_environment_cache"] = [v.as_dict() for v in self.base_environment_cache] + if self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type.value if self.created_timestamp is not None: body["created_timestamp"] = self.created_timestamp if self.creator_user_id is not None: @@ -2784,6 +2795,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.base_environment_cache: body["base_environment_cache"] = self.base_environment_cache + if self.base_environment_type is not None: + body["base_environment_type"] = self.base_environment_type if self.created_timestamp is not None: body["created_timestamp"] = self.created_timestamp if self.creator_user_id is not None: @@ -2815,6 +2828,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DefaultBaseEnvironment: """Deserializes the DefaultBaseEnvironment from a dictionary.""" return cls( base_environment_cache=_repeated_dict(d, "base_environment_cache", DefaultBaseEnvironmentCache), + base_environment_type=_enum(d, "base_environment_type", BaseEnvironmentType), created_timestamp=d.get("created_timestamp", None), creator_user_id=d.get("creator_user_id", None), environment=_from_dict(d, "environment", Environment), @@ -3307,6 +3321,9 @@ class Environment: version and a set of Python packages. The version is a string, consisting of an integer.""" jar_dependencies: Optional[List[str]] = None + """Use `java_dependencies` instead.""" + + java_dependencies: Optional[List[str]] = None """List of jar dependencies, should be string representing volume paths. 
For example: `/Volumes/path/to/test.jar`.""" @@ -3321,6 +3338,8 @@ def as_dict(self) -> dict: body["environment_version"] = self.environment_version if self.jar_dependencies: body["jar_dependencies"] = [v for v in self.jar_dependencies] + if self.java_dependencies: + body["java_dependencies"] = [v for v in self.java_dependencies] return body def as_shallow_dict(self) -> dict: @@ -3334,6 +3353,8 @@ def as_shallow_dict(self) -> dict: body["environment_version"] = self.environment_version if self.jar_dependencies: body["jar_dependencies"] = self.jar_dependencies + if self.java_dependencies: + body["java_dependencies"] = self.java_dependencies return body @classmethod @@ -3344,6 +3365,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Environment: dependencies=d.get("dependencies", None), environment_version=d.get("environment_version", None), jar_dependencies=d.get("jar_dependencies", None), + java_dependencies=d.get("java_dependencies", None), ) @@ -3581,6 +3603,15 @@ class GcpAttributes: boot_disk_size: Optional[int] = None """Boot disk size in GB""" + first_on_demand: Optional[int] = None + """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This + value should be greater than 0, to make sure the cluster driver node is placed on an on-demand + instance. If this value is greater than or equal to the current cluster size, all nodes will be + placed on on-demand instances. If this value is less than the current cluster size, + `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed + on `availability` instances. Note that this value does not affect cluster size and cannot + currently be mutated over the lifetime of a cluster.""" + google_service_account: Optional[str] = None """If provided, the cluster will impersonate the google service account when accessing gcloud services (like GCS). The google service account must have previously been added to the @@ -3612,6 +3643,8 @@ def as_dict(self) -> dict: body["availability"] = self.availability.value if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3629,6 +3662,8 @@ def as_shallow_dict(self) -> dict: body["availability"] = self.availability if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3645,6 +3680,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: return cls( availability=_enum(d, "availability", GcpAvailability), boot_disk_size=d.get("boot_disk_size", None), + first_on_demand=d.get("first_on_demand", None), google_service_account=d.get("google_service_account", None), local_ssd_count=d.get("local_ssd_count", None), use_preemptible_executors=d.get("use_preemptible_executors", None), @@ -5016,6 +5052,16 @@ class InstancePoolAwsAttributes: availability: Optional[InstancePoolAwsAttributesAvailability] = None """Availability type used for the spot nodes.""" + instance_profile_arn: Optional[str] = None + """All AWS instances belonging to the instance pool will have this instance profile. 
If omitted, + instances will initially be launched with the workspace's default instance profile. If defined, + clusters that use the pool will inherit the instance profile, and must not specify their own + instance profile on cluster creation or update. If the pool does not specify an instance + profile, clusters using the pool may specify any instance profile. The instance profile must + have previously been added to the Databricks environment by an account administrator. + + This feature may only be available to certain customer plans.""" + spot_bid_price_percent: Optional[int] = None """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -5038,6 +5084,8 @@ def as_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability.value + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -5049,6 +5097,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -5060,6 +5110,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" return cls( availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability), + instance_profile_arn=d.get("instance_profile_arn", None), spot_bid_price_percent=d.get("spot_bid_price_percent", None), zone_id=d.get("zone_id", None), ) @@ -7457,6 +7508,8 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" + NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -7495,6 +7548,7 @@ class TerminationReasonCode(Enum): UNKNOWN = "UNKNOWN" UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" + USAGE_POLICY_ENTITLEMENT_DENIED = "USAGE_POLICY_ENTITLEMENT_DENIED" USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION" USER_REQUEST = "USER_REQUEST" WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" @@ -10735,6 +10789,26 @@ def delete_default_base_environment(self, id: str): self._api.do("DELETE", f"/api/2.0/default-base-environments/{id}", headers=headers) + def get_default_base_environment(self, id: str) -> DefaultBaseEnvironment: + """Return the default base environment details for a given ID. 
+ + :param id: str + + :returns: :class:`DefaultBaseEnvironment` + """ + + query = {} + if id is not None: + query["id"] = id + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/default-base-environments:getDefaultBaseEnvironment", query=query, headers=headers + ) + return DefaultBaseEnvironment.from_dict(res) + def install(self, cluster_id: str, libraries: List[Library]): """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. @@ -10829,13 +10903,13 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers) def update_default_base_environment( - self, id: str, *, default_base_environment: Optional[DefaultBaseEnvironment] = None + self, id: str, default_base_environment: DefaultBaseEnvironment ) -> DefaultBaseEnvironment: """Update the default base environment for the given ID. This process will asynchronously regenerate the cache. The existing cache remains available until it expires. :param id: str - :param default_base_environment: :class:`DefaultBaseEnvironment` (optional) + :param default_base_environment: :class:`DefaultBaseEnvironment` :returns: :class:`DefaultBaseEnvironment` """ @@ -10850,15 +10924,20 @@ def update_default_base_environment( res = self._api.do("PATCH", f"/api/2.0/default-base-environments/{id}", body=body, headers=headers) return DefaultBaseEnvironment.from_dict(res) - def update_default_default_base_environment(self, id: str) -> DefaultBaseEnvironment: + def update_default_default_base_environment( + self, *, base_environment_type: Optional[BaseEnvironmentType] = None, id: Optional[str] = None + ) -> DefaultBaseEnvironment: """Set the default base environment for the workspace. This marks the specified DBE as the workspace default. 
- :param id: str + :param base_environment_type: :class:`BaseEnvironmentType` (optional) + :param id: str (optional) :returns: :class:`DefaultBaseEnvironment` """ body = {} + if base_environment_type is not None: + body["base_environment_type"] = base_environment_type.value if id is not None: body["id"] = id headers = { @@ -10866,7 +10945,7 @@ def update_default_default_base_environment(self, id: str) -> DefaultBaseEnviron "Content-Type": "application/json", } - res = self._api.do("PATCH", "/api/2.0/default-base-environments/default", body=body, headers=headers) + res = self._api.do("POST", "/api/2.0/default-base-environments:setDefault", body=body, headers=headers) return DefaultBaseEnvironment.from_dict(res) diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 79de97355..92df557c4 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -368,6 +368,9 @@ class GenieAttachment: query: Optional[GenieQueryAttachment] = None """Query Attachment if Genie responds with a SQL query""" + suggested_questions: Optional[GenieSuggestedQuestionsAttachment] = None + """Follow-up questions suggested by Genie""" + text: Optional[TextAttachment] = None """Text Attachment if Genie responds with text""" @@ -378,6 +381,8 @@ def as_dict(self) -> dict: body["attachment_id"] = self.attachment_id if self.query: body["query"] = self.query.as_dict() + if self.suggested_questions: + body["suggested_questions"] = self.suggested_questions.as_dict() if self.text: body["text"] = self.text.as_dict() return body @@ -389,6 +394,8 @@ def as_shallow_dict(self) -> dict: body["attachment_id"] = self.attachment_id if self.query: body["query"] = self.query + if self.suggested_questions: + body["suggested_questions"] = self.suggested_questions if self.text: body["text"] = self.text return body @@ -399,6 +406,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieAttachment: return cls( attachment_id=d.get("attachment_id", None), query=_from_dict(d, "query", GenieQueryAttachment), + suggested_questions=_from_dict(d, "suggested_questions", GenieSuggestedQuestionsAttachment), text=_from_dict(d, "text", TextAttachment), ) @@ -518,6 +526,40 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: ) +@dataclass +class GenieFeedback: + """Feedback containing rating and optional comment""" + + comment: Optional[str] = None + """Optional feedback comment text""" + + rating: Optional[GenieFeedbackRating] = None + """The feedback rating""" + + def as_dict(self) -> dict: + """Serializes the GenieFeedback into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.comment is not None: + body["comment"] = self.comment + if self.rating is not None: + body["rating"] = self.rating.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieFeedback into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: + body["comment"] = self.comment + if self.rating is not None: + body["rating"] = self.rating + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieFeedback: + """Deserializes the GenieFeedback from a dictionary.""" + return cls(comment=d.get("comment", None), rating=_enum(d, "rating", GenieFeedbackRating)) + + class GenieFeedbackRating(Enum): """Feedback rating for Genie messages""" @@ -728,6 +770,9 @@ class GenieMessage: error: Optional[MessageError] = None """Error message if Genie failed to respond to the message""" + feedback: 
Optional[GenieFeedback] = None + """User feedback for the message if provided""" + last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" @@ -753,6 +798,8 @@ def as_dict(self) -> dict: body["created_timestamp"] = self.created_timestamp if self.error: body["error"] = self.error.as_dict() + if self.feedback: + body["feedback"] = self.feedback.as_dict() if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -782,6 +829,8 @@ def as_shallow_dict(self) -> dict: body["created_timestamp"] = self.created_timestamp if self.error: body["error"] = self.error + if self.feedback: + body["feedback"] = self.feedback if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -807,6 +856,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieMessage: conversation_id=d.get("conversation_id", None), created_timestamp=d.get("created_timestamp", None), error=_from_dict(d, "error", MessageError), + feedback=_from_dict(d, "feedback", GenieFeedback), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), message_id=d.get("message_id", None), @@ -827,6 +877,8 @@ class GenieQueryAttachment: last_updated_timestamp: Optional[int] = None """Time when the user updated the query last""" + parameters: Optional[List[QueryAttachmentParameter]] = None + query: Optional[str] = None """AI generated SQL query""" @@ -849,6 +901,8 @@ def as_dict(self) -> dict: body["id"] = self.id if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] if self.query is not None: body["query"] = self.query if self.query_result_metadata: @@ -868,6 +922,8 @@ def as_shallow_dict(self) -> dict: body["id"] = self.id if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp + if self.parameters: + body["parameters"] = self.parameters if self.query is not None: body["query"] = self.query if self.query_result_metadata: @@ -885,6 +941,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieQueryAttachment: description=d.get("description", None), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), + parameters=_repeated_dict(d, "parameters", QueryAttachmentParameter), query=d.get("query", None), query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata), statement_id=d.get("statement_id", None), @@ -935,6 +992,9 @@ class GenieSpace: description: Optional[str] = None """Description of the Genie Space""" + warehouse_id: Optional[str] = None + """Warehouse associated with the Genie Space""" + def as_dict(self) -> dict: """Serializes the GenieSpace into a dictionary suitable for use as a JSON request body.""" body = {} @@ -944,6 +1004,8 @@ def as_dict(self) -> dict: body["space_id"] = self.space_id if self.title is not None: body["title"] = self.title + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: @@ -955,12 +1017,19 @@ def as_shallow_dict(self) -> dict: body["space_id"] = self.space_id if self.title is not None: body["title"] = self.title + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: """Deserializes the GenieSpace from a dictionary.""" - return cls(description=d.get("description", None), space_id=d.get("space_id", 
None), title=d.get("title", None)) + return cls( + description=d.get("description", None), + space_id=d.get("space_id", None), + title=d.get("title", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass @@ -1012,6 +1081,33 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationResponse: ) +@dataclass +class GenieSuggestedQuestionsAttachment: + """Follow-up questions suggested by Genie""" + + questions: Optional[List[str]] = None + """The suggested follow-up questions""" + + def as_dict(self) -> dict: + """Serializes the GenieSuggestedQuestionsAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.questions: + body["questions"] = [v for v in self.questions] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieSuggestedQuestionsAttachment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.questions: + body["questions"] = self.questions + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieSuggestedQuestionsAttachment: + """Deserializes the GenieSuggestedQuestionsAttachment from a dictionary.""" + return cls(questions=d.get("questions", None)) + + @dataclass class GetPublishedDashboardEmbeddedResponse: def as_dict(self) -> dict: @@ -1227,6 +1323,7 @@ class MessageErrorType(Enum): DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR" DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT" DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE" + EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION = "EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION" FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" @@ -1237,6 +1334,11 @@ class MessageErrorType(Enum): GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_FAILED_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_ONGOING_EXCEPTION" + INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION = "INTERNAL_CATALOG_ASSET_CREATION_UNSUPPORTED_EXCEPTION" + INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION = "INTERNAL_CATALOG_MISSING_UC_PATH_EXCEPTION" + INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION" INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" @@ -1424,6 +1526,42 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard: ) +@dataclass +class QueryAttachmentParameter: + keyword: Optional[str] = None + + sql_type: Optional[str] = None + + value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the QueryAttachmentParameter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.keyword is not None: + body["keyword"] = self.keyword + if self.sql_type is not None: + body["sql_type"] = self.sql_type + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> 
dict: + """Serializes the QueryAttachmentParameter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.keyword is not None: + body["keyword"] = self.keyword + if self.sql_type is not None: + body["sql_type"] = self.sql_type + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter: + """Deserializes the QueryAttachmentParameter from a dictionary.""" + return cls(keyword=d.get("keyword", None), sql_type=d.get("sql_type", None), value=d.get("value", None)) + + @dataclass class QueryResponseStatus: canceled: Optional[Empty] = None @@ -1991,6 +2129,49 @@ def create_message_and_wait( timeout=timeout ) + def create_space( + self, + warehouse_id: str, + serialized_space: str, + *, + description: Optional[str] = None, + parent_path: Optional[str] = None, + title: Optional[str] = None, + ) -> GenieSpace: + """Creates a Genie space from a serialized payload. + + :param warehouse_id: str + Warehouse to associate with the new space + :param serialized_space: str + Serialized export model for the space contents + :param description: str (optional) + Optional description + :param parent_path: str (optional) + Parent folder path where the space will be registered + :param title: str (optional) + Optional title override + + :returns: :class:`GenieSpace` + """ + body = {} + if description is not None: + body["description"] = description + if parent_path is not None: + body["parent_path"] = parent_path + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/genie/spaces", body=body, headers=headers) + return GenieSpace.from_dict(res) + def delete_conversation(self, space_id: str, conversation_id: str): """Delete a conversation. @@ -2008,6 +2189,29 @@ def delete_conversation(self, space_id: str, conversation_id: str): self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}", headers=headers) + def delete_conversation_message(self, space_id: str, conversation_id: str, message_id: str): + """Delete a conversation message. + + :param space_id: str + The ID associated with the Genie space where the message is located. + :param conversation_id: str + The ID associated with the conversation. + :param message_id: str + The ID associated with the message to delete. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}", + headers=headers, + ) + def execute_message_attachment_query( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: @@ -2040,7 +2244,8 @@ def execute_message_attachment_query( def execute_message_query( self, space_id: str, conversation_id: str, message_id: str ) -> GenieGetMessageQueryResultResponse: - """Execute the SQL query in the message. + """DEPRECATED: Use [Execute Message Attachment Query](:method:genie/executemessageattachmentquery) + instead. 
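+
+    A minimal migration sketch (editorial illustration only; assumes a configured
+    `WorkspaceClient` named `w` and an `attachment_id` read from the message's query
+    attachment -- these names are not part of this patch):
+
+        result = w.genie.execute_message_attachment_query(
+            space_id, conversation_id, message_id, attachment_id
+        )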
:param space_id: str Genie space ID @@ -2188,8 +2393,8 @@ def get_message_attachment_query_result( def get_message_query_result( self, space_id: str, conversation_id: str, message_id: str ) -> GenieGetMessageQueryResultResponse: - """Get the result of SQL query if the message has a query attachment. This is only available if a message - has a query attachment and the message status is `EXECUTING_QUERY`. + """DEPRECATED: Use [Get Message Attachment Query Result](:method:genie/getmessageattachmentqueryresult) + instead. :param space_id: str Genie space ID @@ -2215,8 +2420,8 @@ def get_message_query_result( def get_message_query_result_by_attachment( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: - """Get the result of SQL query if the message has a query attachment. This is only available if a message - has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. + """DEPRECATED: Use [Get Message Attachment Query Result](:method:genie/getmessageattachmentqueryresult) + instead. :param space_id: str Genie space ID @@ -2292,12 +2497,20 @@ def list_conversation_messages( return GenieListConversationMessagesResponse.from_dict(res) def list_conversations( - self, space_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + self, + space_id: str, + *, + include_all: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, ) -> GenieListConversationsResponse: """Get a list of conversations in a Genie Space. :param space_id: str The ID of the Genie space to retrieve conversations from. + :param include_all: bool (optional) + Include all conversations in the space across all users. Requires at least CAN MANAGE permission on + the space. :param page_size: int (optional) Maximum number of conversations to return per page :param page_token: str (optional) @@ -2307,6 +2520,8 @@ def list_conversations( """ query = {} + if include_all is not None: + query["include_all"] = include_all if page_size is not None: query["page_size"] = page_size if page_token is not None: @@ -2344,7 +2559,13 @@ def list_spaces( return GenieListSpacesResponse.from_dict(res) def send_message_feedback( - self, space_id: str, conversation_id: str, message_id: str, feedback_rating: GenieFeedbackRating + self, + space_id: str, + conversation_id: str, + message_id: str, + rating: GenieFeedbackRating, + *, + comment: Optional[str] = None, ): """Send feedback for a message. @@ -2354,14 +2575,18 @@ def send_message_feedback( The ID associated with the conversation. :param message_id: str The ID associated with the message to provide feedback for. - :param feedback_rating: :class:`GenieFeedbackRating` + :param rating: :class:`GenieFeedbackRating` The rating (POSITIVE, NEGATIVE, or NONE). + :param comment: str (optional) + Optional text feedback that will be stored as a comment. 
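+
+    Example (hypothetical values; assumes a configured `WorkspaceClient` named `w`):
+
+        w.genie.send_message_feedback(
+            space_id="...",
+            conversation_id="...",
+            message_id="...",
+            rating=GenieFeedbackRating.POSITIVE,
+            comment="Returned the expected rows",
+        )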
""" body = {} - if feedback_rating is not None: - body["feedback_rating"] = feedback_rating.value + if comment is not None: + body["comment"] = comment + if rating is not None: + body["rating"] = rating.value headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -2423,6 +2648,47 @@ def trash_space(self, space_id: str): self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + def update_space( + self, + space_id: str, + *, + description: Optional[str] = None, + serialized_space: Optional[str] = None, + title: Optional[str] = None, + warehouse_id: Optional[str] = None, + ) -> GenieSpace: + """Updates a Genie space with a serialized payload. + + :param space_id: str + Genie space ID + :param description: str (optional) + Optional description + :param serialized_space: str (optional) + Serialized export model for the space contents (full replacement) + :param title: str (optional) + Optional title override + :param warehouse_id: str (optional) + Optional warehouse override + + :returns: :class:`GenieSpace` + """ + body = {} + if description is not None: + body["description"] = description + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/genie/spaces/{space_id}", body=body, headers=headers) + return GenieSpace.from_dict(res) + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index efd4cc9e1..6f7463a34 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -18,6 +18,169 @@ # all definitions in this file are in alphabetical order +@dataclass +class CustomTag: + key: Optional[str] = None + """The key of the custom tag.""" + + value: Optional[str] = None + """The value of the custom tag.""" + + def as_dict(self) -> dict: + """Serializes the CustomTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomTag: + """Deserializes the CustomTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + +@dataclass +class DatabaseBranch: + project_id: str + + branch_id: Optional[str] = None + + create_time: Optional[str] = None + """A timestamp indicating when the branch was created.""" + + current_state: Optional[str] = None + """The branch’s state, indicating if it is initializing, ready for use, or archived.""" + + default: Optional[bool] = None + """Whether the branch is the project's default branch. This field is only returned on create/update + responses. 
See effective_default for the value that is actually applied to the database branch.""" + + effective_default: Optional[bool] = None + """Whether the branch is the project's default branch.""" + + is_protected: Optional[bool] = None + """Whether the branch is protected.""" + + logical_size_bytes: Optional[int] = None + """The logical size of the branch.""" + + parent_id: Optional[str] = None + """The id of the parent branch""" + + parent_lsn: Optional[str] = None + """The Log Sequence Number (LSN) on the parent branch from which this branch was created. When + restoring a branch using the Restore Database Branch endpoint, this value isn’t finalized + until all operations related to the restore have completed successfully.""" + + parent_time: Optional[str] = None + """The point in time on the parent branch from which this branch was created.""" + + pending_state: Optional[str] = None + + state_change_time: Optional[str] = None + """A timestamp indicating when the `current_state` began.""" + + update_time: Optional[str] = None + """A timestamp indicating when the branch was last updated.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseBranch into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.default is not None: + body["default"] = self.default + if self.effective_default is not None: + body["effective_default"] = self.effective_default + if self.is_protected is not None: + body["is_protected"] = self.is_protected + if self.logical_size_bytes is not None: + body["logical_size_bytes"] = self.logical_size_bytes + if self.parent_id is not None: + body["parent_id"] = self.parent_id + if self.parent_lsn is not None: + body["parent_lsn"] = self.parent_lsn + if self.parent_time is not None: + body["parent_time"] = self.parent_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.project_id is not None: + body["project_id"] = self.project_id + if self.state_change_time is not None: + body["state_change_time"] = self.state_change_time + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseBranch into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch_id is not None: + body["branch_id"] = self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.default is not None: + body["default"] = self.default + if self.effective_default is not None: + body["effective_default"] = self.effective_default + if self.is_protected is not None: + body["is_protected"] = self.is_protected + if self.logical_size_bytes is not None: + body["logical_size_bytes"] = self.logical_size_bytes + if self.parent_id is not None: + body["parent_id"] = self.parent_id + if self.parent_lsn is not None: + body["parent_lsn"] = self.parent_lsn + if self.parent_time is not None: + body["parent_time"] = self.parent_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.project_id is not None: + body["project_id"] = self.project_id + if self.state_change_time is not None: + body["state_change_time"] = self.state_change_time + if 
self.update_time is not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseBranch: + """Deserializes the DatabaseBranch from a dictionary.""" + return cls( + branch_id=d.get("branch_id", None), + create_time=d.get("create_time", None), + current_state=d.get("current_state", None), + default=d.get("default", None), + effective_default=d.get("effective_default", None), + is_protected=d.get("is_protected", None), + logical_size_bytes=d.get("logical_size_bytes", None), + parent_id=d.get("parent_id", None), + parent_lsn=d.get("parent_lsn", None), + parent_time=d.get("parent_time", None), + pending_state=d.get("pending_state", None), + project_id=d.get("project_id", None), + state_change_time=d.get("state_change_time", None), + update_time=d.get("update_time", None), + ) + + @dataclass class DatabaseCatalog: name: str @@ -31,6 +194,12 @@ class DatabaseCatalog: create_database_if_not_exists: Optional[bool] = None + database_branch_id: Optional[str] = None + """The branch_id of the database branch associated with the catalog.""" + + database_project_id: Optional[str] = None + """The project_id of the database project associated with the catalog.""" + uid: Optional[str] = None def as_dict(self) -> dict: @@ -38,10 +207,14 @@ def as_dict(self) -> dict: body = {} if self.create_database_if_not_exists is not None: body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name if self.database_name is not None: body["database_name"] = self.database_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id if self.name is not None: body["name"] = self.name if self.uid is not None: @@ -53,10 +226,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.create_database_if_not_exists is not None: body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_branch_id is not None: + body["database_branch_id"] = self.database_branch_id if self.database_instance_name is not None: body["database_instance_name"] = self.database_instance_name if self.database_name is not None: body["database_name"] = self.database_name + if self.database_project_id is not None: + body["database_project_id"] = self.database_project_id if self.name is not None: body["name"] = self.name if self.uid is not None: @@ -68,8 +245,10 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog: """Deserializes the DatabaseCatalog from a dictionary.""" return cls( create_database_if_not_exists=d.get("create_database_if_not_exists", None), + database_branch_id=d.get("database_branch_id", None), database_instance_name=d.get("database_instance_name", None), database_name=d.get("database_name", None), + database_project_id=d.get("database_project_id", None), name=d.get("name", None), uid=d.get("uid", None), ) @@ -105,6 +284,221 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential: return cls(expiration_time=d.get("expiration_time", None), token=d.get("token", None)) +@dataclass +class DatabaseEndpoint: + project_id: str + + branch_id: str + + autoscaling_limit_max_cu: Optional[float] = None + """The maximum number of Compute Units.""" + + autoscaling_limit_min_cu: Optional[float] = None + """The minimum number of Compute Units.""" + + create_time: Optional[str] = None 
+    """A timestamp indicating when the compute endpoint was created."""
+
+    current_state: Optional[DatabaseEndpointState] = None
+
+    disabled: Optional[bool] = None
+    """Whether to restrict connections to the compute endpoint. Enabling this option schedules a
+    suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or
+    console action."""
+
+    endpoint_id: Optional[str] = None
+
+    host: Optional[str] = None
+    """The hostname of the compute endpoint. This is the hostname specified when connecting to a
+    database."""
+
+    last_active_time: Optional[str] = None
+    """A timestamp indicating when the compute endpoint was last active."""
+
+    pending_state: Optional[DatabaseEndpointState] = None
+
+    pooler_mode: Optional[DatabaseEndpointPoolerMode] = None
+
+    settings: Optional[DatabaseEndpointSettings] = None
+
+    start_time: Optional[str] = None
+    """A timestamp indicating when the compute endpoint was last started."""
+
+    suspend_time: Optional[str] = None
+    """A timestamp indicating when the compute endpoint was last suspended."""
+
+    suspend_timeout_duration: Optional[str] = None
+    """Duration of inactivity after which the compute endpoint is automatically suspended."""
+
+    type: Optional[DatabaseEndpointType] = None
+    """NOTE: if type should default to some server-set value, this needs an effective_type field OR
+    this field must be made REQUIRED"""
+
+    update_time: Optional[str] = None
+    """A timestamp indicating when the compute endpoint was last updated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseEndpoint into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.autoscaling_limit_max_cu is not None:
+            body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu
+        if self.autoscaling_limit_min_cu is not None:
+            body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu
+        if self.branch_id is not None:
+            body["branch_id"] = self.branch_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.current_state is not None:
+            body["current_state"] = self.current_state.value
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
+        if self.endpoint_id is not None:
+            body["endpoint_id"] = self.endpoint_id
+        if self.host is not None:
+            body["host"] = self.host
+        if self.last_active_time is not None:
+            body["last_active_time"] = self.last_active_time
+        if self.pending_state is not None:
+            body["pending_state"] = self.pending_state.value
+        if self.pooler_mode is not None:
+            body["pooler_mode"] = self.pooler_mode.value
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.suspend_time is not None:
+            body["suspend_time"] = self.suspend_time
+        if self.suspend_timeout_duration is not None:
+            body["suspend_timeout_duration"] = self.suspend_timeout_duration
+        if self.type is not None:
+            body["type"] = self.type.value
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscaling_limit_max_cu is not None:
+            body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu
+        if self.autoscaling_limit_min_cu is not None:
+            body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu
+        if self.branch_id is not None:
+            body["branch_id"] = 
self.branch_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.current_state is not None: + body["current_state"] = self.current_state + if self.disabled is not None: + body["disabled"] = self.disabled + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.host is not None: + body["host"] = self.host + if self.last_active_time is not None: + body["last_active_time"] = self.last_active_time + if self.pending_state is not None: + body["pending_state"] = self.pending_state + if self.pooler_mode is not None: + body["pooler_mode"] = self.pooler_mode + if self.project_id is not None: + body["project_id"] = self.project_id + if self.settings: + body["settings"] = self.settings + if self.start_time is not None: + body["start_time"] = self.start_time + if self.suspend_time is not None: + body["suspend_time"] = self.suspend_time + if self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + if self.type is not None: + body["type"] = self.type + if self.update_time is not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseEndpoint: + """Deserializes the DatabaseEndpoint from a dictionary.""" + return cls( + autoscaling_limit_max_cu=d.get("autoscaling_limit_max_cu", None), + autoscaling_limit_min_cu=d.get("autoscaling_limit_min_cu", None), + branch_id=d.get("branch_id", None), + create_time=d.get("create_time", None), + current_state=_enum(d, "current_state", DatabaseEndpointState), + disabled=d.get("disabled", None), + endpoint_id=d.get("endpoint_id", None), + host=d.get("host", None), + last_active_time=d.get("last_active_time", None), + pending_state=_enum(d, "pending_state", DatabaseEndpointState), + pooler_mode=_enum(d, "pooler_mode", DatabaseEndpointPoolerMode), + project_id=d.get("project_id", None), + settings=_from_dict(d, "settings", DatabaseEndpointSettings), + start_time=d.get("start_time", None), + suspend_time=d.get("suspend_time", None), + suspend_timeout_duration=d.get("suspend_timeout_duration", None), + type=_enum(d, "type", DatabaseEndpointType), + update_time=d.get("update_time", None), + ) + + +class DatabaseEndpointPoolerMode(Enum): + """The connection pooler mode. 
Lakebase supports PgBouncer in `transaction` mode only.""" + + TRANSACTION = "TRANSACTION" + + +@dataclass +class DatabaseEndpointSettings: + """A collection of settings for a compute endpoint""" + + pg_settings: Optional[Dict[str, str]] = None + """A raw representation of Postgres settings.""" + + pgbouncer_settings: Optional[Dict[str, str]] = None + """A raw representation of PgBouncer settings.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseEndpointSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseEndpointSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseEndpointSettings: + """Deserializes the DatabaseEndpointSettings from a dictionary.""" + return cls(pg_settings=d.get("pg_settings", None), pgbouncer_settings=d.get("pgbouncer_settings", None)) + + +class DatabaseEndpointState(Enum): + """The state of the compute endpoint""" + + ACTIVE = "ACTIVE" + IDLE = "IDLE" + INIT = "INIT" + + +class DatabaseEndpointType(Enum): + """The compute endpoint type. Either `read_write` or `read_only`.""" + + READ_ONLY = "READ_ONLY" + READ_WRITE = "READ_WRITE" + + @dataclass class DatabaseInstance: """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and @@ -113,11 +507,6 @@ class DatabaseInstance: name: str """The name of the instance. This is the unique identifier for the instance.""" - budget_policy_id: Optional[str] = None - """The desired budget policy to associate with the instance. This field is only returned on - create/update responses, and represents the customer provided budget policy. See - effective_budget_policy_id for the policy that is actually applied to the instance.""" - capacity: Optional[str] = None """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" @@ -130,51 +519,46 @@ class DatabaseInstance: creator: Optional[str] = None """The email of the creator of the instance.""" - effective_budget_policy_id: Optional[str] = None - """The policy that is applied to the instance.""" + custom_tags: Optional[List[CustomTag]] = None + """Custom tags associated with the instance. This field is only included on create and update + responses.""" + + effective_capacity: Optional[str] = None + """Deprecated. The sku of the instance; this field will always match the value of capacity.""" + + effective_custom_tags: Optional[List[CustomTag]] = None + """The recorded custom tags associated with the instance.""" effective_enable_pg_native_login: Optional[bool] = None - """xref AIP-129. `enable_pg_native_login` is owned by the client, while - `effective_enable_pg_native_login` is owned by the server. `enable_pg_native_login` will only be - set in Create/Update response messages if and only if the user provides the field via the - request. `effective_enable_pg_native_login` on the other hand will always bet set in all - response messages (Create/Update/Get/List).""" + """Whether the instance has PG native password login enabled.""" effective_enable_readable_secondaries: Optional[bool] = None - """xref AIP-129. 
`enable_readable_secondaries` is owned by the client, while - `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries` - will only be set in Create/Update response messages if and only if the user provides the field - via the request. `effective_enable_readable_secondaries` on the other hand will always bet set - in all response messages (Create/Update/Get/List).""" + """Whether secondaries serving read-only traffic are enabled. Defaults to false.""" effective_node_count: Optional[int] = None - """xref AIP-129. `node_count` is owned by the client, while `effective_node_count` is owned by the - server. `node_count` will only be set in Create/Update response messages if and only if the user - provides the field via the request. `effective_node_count` on the other hand will always bet set - in all response messages (Create/Update/Get/List).""" + """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults + to 1 primary and 0 secondaries.""" effective_retention_window_in_days: Optional[int] = None - """xref AIP-129. `retention_window_in_days` is owned by the client, while - `effective_retention_window_in_days` is owned by the server. `retention_window_in_days` will - only be set in Create/Update response messages if and only if the user provides the field via - the request. `effective_retention_window_in_days` on the other hand will always bet set in all - response messages (Create/Update/Get/List).""" + """The retention window for the instance. This is the time window in days for which the historical + data is retained.""" effective_stopped: Optional[bool] = None - """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the - server. `stopped` will only be set in Create/Update response messages if and only if the user - provides the field via the request. `effective_stopped` on the other hand will always bet set in - all response messages (Create/Update/Get/List).""" + """Whether the instance is stopped.""" + + effective_usage_policy_id: Optional[str] = None + """The policy that is applied to the instance.""" enable_pg_native_login: Optional[bool] = None - """Whether the instance has PG native password login enabled. Defaults to true.""" + """Whether to enable PG native password login on the instance. Defaults to false.""" enable_readable_secondaries: Optional[bool] = None """Whether to enable secondaries to serve read-only traffic. Defaults to false.""" node_count: Optional[int] = None """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults - to 1 primary and 0 secondaries.""" + to 1 primary and 0 secondaries. This field is input only, see effective_node_count for the + output.""" parent_instance_ref: Optional[DatabaseInstanceRef] = None """The ref of the parent instance. This is only available if the instance is child instance. Input: @@ -199,16 +583,17 @@ class DatabaseInstance: """The current state of the instance.""" stopped: Optional[bool] = None - """Whether the instance is stopped.""" + """Whether to stop the instance. 
An input only param, see effective_stopped for the output.""" uid: Optional[str] = None """An immutable UUID identifier for the instance.""" + usage_policy_id: Optional[str] = None + """The desired usage policy to associate with the instance.""" + def as_dict(self) -> dict: """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id if self.capacity is not None: body["capacity"] = self.capacity if self.child_instance_refs: @@ -217,8 +602,12 @@ def as_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.effective_capacity is not None: + body["effective_capacity"] = self.effective_capacity + if self.effective_custom_tags: + body["effective_custom_tags"] = [v.as_dict() for v in self.effective_custom_tags] if self.effective_enable_pg_native_login is not None: body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login if self.effective_enable_readable_secondaries is not None: @@ -229,6 +618,8 @@ def as_dict(self) -> dict: body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.enable_pg_native_login is not None: body["enable_pg_native_login"] = self.enable_pg_native_login if self.enable_readable_secondaries is not None: @@ -253,13 +644,13 @@ def as_dict(self) -> dict: body["stopped"] = self.stopped if self.uid is not None: body["uid"] = self.uid + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id if self.capacity is not None: body["capacity"] = self.capacity if self.child_instance_refs: @@ -268,8 +659,12 @@ def as_shallow_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.effective_capacity is not None: + body["effective_capacity"] = self.effective_capacity + if self.effective_custom_tags: + body["effective_custom_tags"] = self.effective_custom_tags if self.effective_enable_pg_native_login is not None: body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login if self.effective_enable_readable_secondaries is not None: @@ -280,6 +675,8 @@ def as_shallow_dict(self) -> dict: body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.enable_pg_native_login is not None: body["enable_pg_native_login"] = 
self.enable_pg_native_login if self.enable_readable_secondaries is not None: @@ -304,23 +701,27 @@ def as_shallow_dict(self) -> dict: body["stopped"] = self.stopped if self.uid is not None: body["uid"] = self.uid + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: """Deserializes the DatabaseInstance from a dictionary.""" return cls( - budget_policy_id=d.get("budget_policy_id", None), capacity=d.get("capacity", None), child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef), creation_time=d.get("creation_time", None), creator=d.get("creator", None), - effective_budget_policy_id=d.get("effective_budget_policy_id", None), + custom_tags=_repeated_dict(d, "custom_tags", CustomTag), + effective_capacity=d.get("effective_capacity", None), + effective_custom_tags=_repeated_dict(d, "effective_custom_tags", CustomTag), effective_enable_pg_native_login=d.get("effective_enable_pg_native_login", None), effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None), effective_node_count=d.get("effective_node_count", None), effective_retention_window_in_days=d.get("effective_retention_window_in_days", None), effective_stopped=d.get("effective_stopped", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), enable_pg_native_login=d.get("enable_pg_native_login", None), enable_readable_secondaries=d.get("enable_readable_secondaries", None), name=d.get("name", None), @@ -333,6 +734,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: state=_enum(d, "state", DatabaseInstanceState), stopped=d.get("stopped", None), uid=d.get("uid", None), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -353,12 +755,9 @@ class DatabaseInstanceRef: provided as input to create a child instance.""" effective_lsn: Optional[str] = None - """xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server. `lsn` - will only be set in Create/Update response messages if and only if the user provides the field - via the request. `effective_lsn` on the other hand will always bet set in all response messages - (Create/Update/Get/List). For a parent ref instance, this is the LSN on the parent instance from - which the instance was created. For a child ref instance, this is the LSN on the instance from - which the child instance was created.""" + """For a parent ref instance, this is the LSN on the parent instance from which the instance was + created. For a child ref instance, this is the LSN on the instance from which the child instance + was created.""" lsn: Optional[str] = None """User-specified WAL LSN of the ref database instance. @@ -418,25 +817,34 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef: class DatabaseInstanceRole: """A DatabaseInstanceRole represents a Postgres role in a database instance.""" + name: str + """The name of the role. This is the unique identifier for the role in an instance.""" + attributes: Optional[DatabaseInstanceRoleAttributes] = None - """API-exposed Postgres role attributes""" + """The desired API-exposed Postgres role attribute to associate with the role. 
Optional."""
+
+    effective_attributes: Optional[DatabaseInstanceRoleAttributes] = None
+    """The attributes that are applied to the role."""
 
     identity_type: Optional[DatabaseInstanceRoleIdentityType] = None
     """The type of the role."""
 
+    instance_name: Optional[str] = None
+
     membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None
     """An enum value for a standard role that this role is a member of."""
 
-    name: Optional[str] = None
-    """The name of the role. This is the unique identifier for the role in an instance."""
-
     def as_dict(self) -> dict:
         """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes.as_dict()
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes.as_dict()
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type.value
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role.value
         if self.name is not None:
@@ -448,8 +856,12 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role
         if self.name is not None:
@@ -461,7 +873,9 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole:
         """Deserializes the DatabaseInstanceRole from a dictionary."""
         return cls(
             attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes),
+            effective_attributes=_from_dict(d, "effective_attributes", DatabaseInstanceRoleAttributes),
             identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType),
+            instance_name=d.get("instance_name", None),
             membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole),
             name=d.get("name", None),
         )
@@ -535,6 +949,261 @@ class DatabaseInstanceState(Enum):
     UPDATING = "UPDATING"
 
 
+@dataclass
+class DatabaseProject:
+    branch_logical_size_limit_bytes: Optional[int] = None
+    """The logical size limit for a branch."""
+
+    budget_policy_id: Optional[str] = None
+    """The desired budget policy to associate with the project. This field is only returned on
+    create/update responses, and represents the customer provided budget policy. See
+    effective_budget_policy_id for the policy that is actually applied to the project."""
+
+    compute_last_active_time: Optional[str] = None
+    """The most recent time when any endpoint of this project was active."""
+
+    create_time: Optional[str] = None
+    """A timestamp indicating when the project was created."""
+
+    custom_tags: Optional[List[DatabaseProjectCustomTag]] = None
+    """Custom tags associated with the project."""
+
+    default_endpoint_settings: Optional[DatabaseProjectDefaultEndpointSettings] = None
+
+    display_name: Optional[str] = None
+    """Human-readable project name."""
+
+    effective_budget_policy_id: Optional[str] = None
+    """The policy that is applied to the project."""
+
+    history_retention_duration: Optional[str] = None
+    """The number of seconds to retain the shared history for point in time recovery for all branches
+    in this project."""
+
+    pg_version: Optional[int] = None
+    """The major Postgres version number. NOTE: fields can be either user-set or server-set; we can't
+    have fields that are optionally user-provided and server-set to a default value. TODO: this needs
+    an effective variant, or this field must be made REQUIRED"""
+
+    project_id: Optional[str] = None
+
+    settings: Optional[DatabaseProjectSettings] = None
+
+    synthetic_storage_size_bytes: Optional[int] = None
+    """The current space occupied by the project in storage. Synthetic storage size combines the
+    logical data size and Write-Ahead Log (WAL) size for all branches in a project."""
+
+    update_time: Optional[str] = None
+    """A timestamp indicating when the project was last updated."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseProject into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch_logical_size_limit_bytes is not None:
+            body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.compute_last_active_time is not None:
+            body["compute_last_active_time"] = self.compute_last_active_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
+        if self.default_endpoint_settings:
+            body["default_endpoint_settings"] = self.default_endpoint_settings.as_dict()
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.effective_budget_policy_id is not None:
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.history_retention_duration is not None:
+            body["history_retention_duration"] = self.history_retention_duration
+        if self.pg_version is not None:
+            body["pg_version"] = self.pg_version
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
+        if self.synthetic_storage_size_bytes is not None:
+            body["synthetic_storage_size_bytes"] = self.synthetic_storage_size_bytes
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseProject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch_logical_size_limit_bytes is not None:
+            body["branch_logical_size_limit_bytes"] = self.branch_logical_size_limit_bytes
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.compute_last_active_time is not None:
+            body["compute_last_active_time"] = self.compute_last_active_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.default_endpoint_settings:
+            body["default_endpoint_settings"] = self.default_endpoint_settings
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.effective_budget_policy_id is not None:
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.history_retention_duration is not None:
+            body["history_retention_duration"] = self.history_retention_duration
+        if self.pg_version is not None:
+            body["pg_version"] = self.pg_version
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.settings:
+            body["settings"] = self.settings
+        if self.synthetic_storage_size_bytes is not None:
+            body["synthetic_storage_size_bytes"] = self.synthetic_storage_size_bytes
+        if self.update_time is
not None: + body["update_time"] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProject: + """Deserializes the DatabaseProject from a dictionary.""" + return cls( + branch_logical_size_limit_bytes=d.get("branch_logical_size_limit_bytes", None), + budget_policy_id=d.get("budget_policy_id", None), + compute_last_active_time=d.get("compute_last_active_time", None), + create_time=d.get("create_time", None), + custom_tags=_repeated_dict(d, "custom_tags", DatabaseProjectCustomTag), + default_endpoint_settings=_from_dict( + d, "default_endpoint_settings", DatabaseProjectDefaultEndpointSettings + ), + display_name=d.get("display_name", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + history_retention_duration=d.get("history_retention_duration", None), + pg_version=d.get("pg_version", None), + project_id=d.get("project_id", None), + settings=_from_dict(d, "settings", DatabaseProjectSettings), + synthetic_storage_size_bytes=d.get("synthetic_storage_size_bytes", None), + update_time=d.get("update_time", None), + ) + + +@dataclass +class DatabaseProjectCustomTag: + key: Optional[str] = None + """The key of the custom tag.""" + + value: Optional[str] = None + """The value of the custom tag.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProjectCustomTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProjectCustomTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectCustomTag: + """Deserializes the DatabaseProjectCustomTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + +@dataclass +class DatabaseProjectDefaultEndpointSettings: + """A collection of settings for a database endpoint.""" + + autoscaling_limit_max_cu: Optional[float] = None + """The maximum number of Compute Units.""" + + autoscaling_limit_min_cu: Optional[float] = None + """The minimum number of Compute Units.""" + + pg_settings: Optional[Dict[str, str]] = None + """A raw representation of Postgres settings.""" + + pgbouncer_settings: Optional[Dict[str, str]] = None + """A raw representation of PgBouncer settings.""" + + suspend_timeout_duration: Optional[str] = None + """Duration of inactivity after which the compute endpoint is automatically suspended.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseProjectDefaultEndpointSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.autoscaling_limit_max_cu is not None: + body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu + if self.autoscaling_limit_min_cu is not None: + body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu + if self.pg_settings: + body["pg_settings"] = self.pg_settings + if self.pgbouncer_settings: + body["pgbouncer_settings"] = self.pgbouncer_settings + if self.suspend_timeout_duration is not None: + body["suspend_timeout_duration"] = self.suspend_timeout_duration + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseProjectDefaultEndpointSettings into a shallow dictionary of its immediate attributes.""" 
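+        # Editor's note: across this module, as_shallow_dict mirrors as_dict but leaves
+        # nested dataclasses unserialized; here every field is a scalar or a plain map,
+        # so the two forms produce the same dictionary.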
+        body = {}
+        if self.autoscaling_limit_max_cu is not None:
+            body["autoscaling_limit_max_cu"] = self.autoscaling_limit_max_cu
+        if self.autoscaling_limit_min_cu is not None:
+            body["autoscaling_limit_min_cu"] = self.autoscaling_limit_min_cu
+        if self.pg_settings:
+            body["pg_settings"] = self.pg_settings
+        if self.pgbouncer_settings:
+            body["pgbouncer_settings"] = self.pgbouncer_settings
+        if self.suspend_timeout_duration is not None:
+            body["suspend_timeout_duration"] = self.suspend_timeout_duration
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectDefaultEndpointSettings:
+        """Deserializes the DatabaseProjectDefaultEndpointSettings from a dictionary."""
+        return cls(
+            autoscaling_limit_max_cu=d.get("autoscaling_limit_max_cu", None),
+            autoscaling_limit_min_cu=d.get("autoscaling_limit_min_cu", None),
+            pg_settings=d.get("pg_settings", None),
+            pgbouncer_settings=d.get("pgbouncer_settings", None),
+            suspend_timeout_duration=d.get("suspend_timeout_duration", None),
+        )
+
+
+@dataclass
+class DatabaseProjectSettings:
+    enable_logical_replication: Optional[bool] = None
+    """Sets wal_level=logical for all compute endpoints in this project. All active endpoints will be
+    suspended. Once enabled, logical replication cannot be disabled."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseProjectSettings into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.enable_logical_replication is not None:
+            body["enable_logical_replication"] = self.enable_logical_replication
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseProjectSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enable_logical_replication is not None:
+            body["enable_logical_replication"] = self.enable_logical_replication
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseProjectSettings:
+        """Deserializes the DatabaseProjectSettings from a dictionary."""
+        return cls(enable_logical_replication=d.get("enable_logical_replication", None))
+
+
 @dataclass
 class DatabaseTable:
     """Next field marker: 13"""
@@ -635,6 +1304,41 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo:
     )
 
 
+@dataclass
+class ListDatabaseBranchesResponse:
+    database_branches: Optional[List[DatabaseBranch]] = None
+    """List of branches."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of branches."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseBranchesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_branches:
+            body["database_branches"] = [v.as_dict() for v in self.database_branches]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseBranchesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_branches:
+            body["database_branches"] = self.database_branches
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseBranchesResponse:
+        """Deserializes the ListDatabaseBranchesResponse from a dictionary."""
+        return cls(
+            database_branches=_repeated_dict(d, "database_branches", DatabaseBranch),
+            next_page_token=d.get("next_page_token", None),
+        )
+
+
 @dataclass
 class ListDatabaseCatalogsResponse:
     database_catalogs: Optional[List[DatabaseCatalog]] = None
@@ -669,6 +1373,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseCatalogsResponse:
     )
 
 
+@dataclass
+class ListDatabaseEndpointsResponse:
+    database_endpoints: Optional[List[DatabaseEndpoint]] = None
+    """List of endpoints."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of endpoints."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseEndpointsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_endpoints:
+            body["database_endpoints"] = [v.as_dict() for v in self.database_endpoints]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseEndpointsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_endpoints:
+            body["database_endpoints"] = self.database_endpoints
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseEndpointsResponse:
+        """Deserializes the ListDatabaseEndpointsResponse from a dictionary."""
+        return cls(
+            database_endpoints=_repeated_dict(d, "database_endpoints", DatabaseEndpoint),
+            next_page_token=d.get("next_page_token", None),
+        )
+
+
 @dataclass
 class ListDatabaseInstanceRolesResponse:
     database_instance_roles: Optional[List[DatabaseInstanceRole]] = None
@@ -699,42 +1438,77 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstanceRolesResponse:
         """Deserializes the ListDatabaseInstanceRolesResponse from a dictionary."""
         return cls(
-            database_instance_roles=_repeated_dict(d, "database_instance_roles", DatabaseInstanceRole),
+            database_instance_roles=_repeated_dict(d, "database_instance_roles", DatabaseInstanceRole),
+            next_page_token=d.get("next_page_token", None),
+        )
+
+
+@dataclass
+class ListDatabaseInstancesResponse:
+    database_instances: Optional[List[DatabaseInstance]] = None
+    """List of instances."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of instances."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_instances:
+            body["database_instances"] = [v.as_dict() for v in self.database_instances]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_instances:
+            body["database_instances"] = self.database_instances
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse:
+        """Deserializes the ListDatabaseInstancesResponse from a dictionary."""
+        return cls(
+            database_instances=_repeated_dict(d, "database_instances", DatabaseInstance),
             next_page_token=d.get("next_page_token", None),
         )
 
 
 @dataclass
-class ListDatabaseInstancesResponse:
-    database_instances: Optional[List[DatabaseInstance]] = None
-    """List of instances."""
+class ListDatabaseProjectsResponse:
+    database_projects: Optional[List[DatabaseProject]] = None
+    """List of projects."""
next_page_token: Optional[str] = None """Pagination token to request the next page of instances.""" def as_dict(self) -> dict: - """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListDatabaseProjectsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.database_instances: - body["database_instances"] = [v.as_dict() for v in self.database_instances] + if self.database_projects: + body["database_projects"] = [v.as_dict() for v in self.database_projects] if self.next_page_token is not None: body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListDatabaseProjectsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.database_instances: - body["database_instances"] = self.database_instances + if self.database_projects: + body["database_projects"] = self.database_projects if self.next_page_token is not None: body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: - """Deserializes the ListDatabaseInstancesResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseProjectsResponse: + """Deserializes the ListDatabaseProjectsResponse from a dictionary.""" return cls( - database_instances=_repeated_dict(d, "database_instances", DatabaseInstance), + database_projects=_repeated_dict(d, "database_projects", DatabaseProject), next_page_token=d.get("next_page_token", None), ) @@ -1582,22 +2356,32 @@ def create_database_instance_and_wait( return self.create_database_instance(database_instance=database_instance).result(timeout=timeout) def create_database_instance_role( - self, instance_name: str, database_instance_role: DatabaseInstanceRole + self, + instance_name: str, + database_instance_role: DatabaseInstanceRole, + *, + database_instance_name: Optional[str] = None, ) -> DatabaseInstanceRole: """Create a role for a Database Instance. :param instance_name: str :param database_instance_role: :class:`DatabaseInstanceRole` + :param database_instance_name: str (optional) :returns: :class:`DatabaseInstanceRole` """ body = database_instance_role.as_dict() + query = {} + if database_instance_name is not None: + query["database_instance_name"] = database_instance_name headers = { "Accept": "application/json", "Content-Type": "application/json", } - res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers) + res = self._api.do( + "POST", f"/api/2.0/database/instances/{instance_name}/roles", query=query, body=body, headers=headers + ) return DatabaseInstanceRole.from_dict(res) def create_database_table(self, table: DatabaseTable) -> DatabaseTable: @@ -1656,12 +2440,8 @@ def delete_database_instance(self, name: str, *, force: Optional[bool] = None, p By default, a instance cannot be deleted if it has descendant instances created via PITR. If this flag is specified as true, all descendent instances will be deleted as well. :param purge: bool (optional) - Note purge=false is in development. If false, the database instance is soft deleted (implementation - pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD - operations nor connected to. 
However they can be undeleted by calling the undelete API for a limited
-          time (implementation pending). If true, the database instance is hard deleted and cannot be
-          undeleted. For the time being, setting this value to true is required to delete an instance (soft
-          delete is not yet supported).
+          Deprecated. Omitting the field or setting it to true will result in the instance being hard deleted.
+          Setting a value of false will throw a bad request.
 
 
@@ -1891,7 +2671,7 @@ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable:
     def list_database_catalogs(
         self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[DatabaseCatalog]:
-        """List all Database Catalogs within a Database Instance.
+        """This API is currently unimplemented, but exposed for Terraform support.
 
         :param instance_name: str
           Name of the instance to get database catalogs for.
@@ -1926,7 +2706,9 @@ def list_database_catalogs(
     def list_database_instance_roles(
         self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
     ) -> Iterator[DatabaseInstanceRole]:
-        """START OF PG ROLE APIs Section
+        """START OF PG ROLE APIs Section These APIs are marked as PUBLIC with stage < PUBLIC_PREVIEW. With more
+        recent Lakebase V2 plans, we don't plan to ever advance these to PUBLIC_PREVIEW. These APIs will
+        remain effectively undocumented/UI-only and we'll aim for a new public roles API as part of V2 PuPr.
 
         :param instance_name: str
         :param page_size: int (optional)
@@ -1991,7 +2773,7 @@ def list_database_instances(
     def list_synced_database_tables(
         self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
    ) -> Iterator[SyncedDatabaseTable]:
-        """List all Synced Database Tables within a Database Instance.
+        """This API is currently unimplemented, but exposed for Terraform support.
 
         :param instance_name: str
           Name of the instance to get synced tables for.
@@ -2026,7 +2808,7 @@ def list_synced_database_tables(
     def update_database_catalog(
         self, name: str, database_catalog: DatabaseCatalog, update_mask: str
     ) -> DatabaseCatalog:
-        """Updated a Database Catalog.
+        """This API is currently unimplemented, but exposed for Terraform support.
 
         :param name: str
           The name of the catalog in UC.
@@ -2058,7 +2840,8 @@ def update_database_instance(
           The name of the instance. This is the unique identifier for the instance.
         :param database_instance: :class:`DatabaseInstance`
         :param update_mask: str
-          The list of fields to update. This field is not yet supported, and is ignored by the server.
+          The list of fields to update. If unspecified, all fields will be updated when possible. To wipe out
+          custom_tags, specify custom_tags in the update_mask with an empty custom_tags map.
 
         :returns: :class:`DatabaseInstance`
         """
@@ -2074,10 +2857,46 @@ def update_database_instance(
         res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers)
         return DatabaseInstance.from_dict(res)
 
+    def update_database_instance_role(
+        self,
+        instance_name: str,
+        name: str,
+        database_instance_role: DatabaseInstanceRole,
+        *,
+        database_instance_name: Optional[str] = None,
+    ) -> DatabaseInstanceRole:
+        """Update a role for a Database Instance.
+
+        :param instance_name: str
+        :param name: str
+          The name of the role. This is the unique identifier for the role in an instance.
+ :param database_instance_role: :class:`DatabaseInstanceRole` + :param database_instance_name: str (optional) + + :returns: :class:`DatabaseInstanceRole` + """ + body = database_instance_role.as_dict() + query = {} + if database_instance_name is not None: + query["database_instance_name"] = database_instance_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/database/instances/{instance_name}/roles/{name}", + query=query, + body=body, + headers=headers, + ) + return DatabaseInstanceRole.from_dict(res) + def update_synced_database_table( self, name: str, synced_table: SyncedDatabaseTable, update_mask: str ) -> SyncedDatabaseTable: - """Update a Synced Database Table. + """This API is currently unimplemented, but exposed for Terraform support. :param name: str Full three-part (catalog, schema, table) name of the table. @@ -2099,3 +2918,381 @@ def update_synced_database_table( res = self._api.do("PATCH", f"/api/2.0/database/synced_tables/{name}", query=query, body=body, headers=headers) return SyncedDatabaseTable.from_dict(res) + + +class DatabaseProjectAPI: + """Database Projects provide access to a database via REST API or direct SQL.""" + + def __init__(self, api_client): + self._api = api_client + + def create_database_branch(self, project_id: str, database_branch: DatabaseBranch) -> DatabaseBranch: + """Create a Database Branch. + + :param project_id: str + :param database_branch: :class:`DatabaseBranch` + + :returns: :class:`DatabaseBranch` + """ + body = database_branch.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/projects/{project_id}/branches", body=body, headers=headers) + return DatabaseBranch.from_dict(res) + + def create_database_endpoint( + self, project_id: str, branch_id: str, database_endpoint: DatabaseEndpoint + ) -> DatabaseEndpoint: + """Create a Database Endpoint. + + :param project_id: str + :param branch_id: str + :param database_endpoint: :class:`DatabaseEndpoint` + + :returns: :class:`DatabaseEndpoint` + """ + body = database_endpoint.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints", + body=body, + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def create_database_project(self, database_project: DatabaseProject) -> DatabaseProject: + """Create a Database Project. + + :param database_project: :class:`DatabaseProject` + + :returns: :class:`DatabaseProject` + """ + body = database_project.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/database/projects", body=body, headers=headers) + return DatabaseProject.from_dict(res) + + def delete_database_branch(self, project_id: str, branch_id: str): + """Delete a Database Branch. + + :param project_id: str + :param branch_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/projects/{project_id}/branches/{branch_id}", headers=headers) + + def delete_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str): + """Delete a Database Endpoint. 
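+
+        A minimal usage sketch (illustrative only; it assumes the workspace client is
+        available as ``w`` and exposes this service as ``w.database_project``; the
+        accessor name and the IDs are assumptions, not part of this patch):
+
+            w.database_project.delete_database_endpoint(
+                project_id="my-project-id",
+                branch_id="my-branch-id",
+                endpoint_id="my-endpoint-id",
+            )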
+ + :param project_id: str + :param branch_id: str + :param endpoint_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + headers=headers, + ) + + def delete_database_project(self, project_id: str): + """Delete a Database Project. + + :param project_id: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/projects/{project_id}", headers=headers) + + def get_database_branch(self, project_id: str, branch_id: str) -> DatabaseBranch: + """Get a Database Branch. + + :param project_id: str + :param branch_id: str + + :returns: :class:`DatabaseBranch` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/projects/{project_id}/branches/{branch_id}", headers=headers) + return DatabaseBranch.from_dict(res) + + def get_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str) -> DatabaseEndpoint: + """Get a Database Endpoint. + + :param project_id: str + :param branch_id: str + :param endpoint_id: str + + :returns: :class:`DatabaseEndpoint` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def get_database_project(self, project_id: str) -> DatabaseProject: + """Get a Database Project. + + :param project_id: str + + :returns: :class:`DatabaseProject` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/projects/{project_id}", headers=headers) + return DatabaseProject.from_dict(res) + + def list_database_branches( + self, project_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseBranch]: + """List Database Branches. + + :param project_id: str + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Branches. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseBranch` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/projects/{project_id}/branches", query=query, headers=headers + ) + if "database_branches" in json: + for v in json["database_branches"]: + yield DatabaseBranch.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_database_endpoints( + self, project_id: str, branch_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseEndpoint]: + """List Database Endpoints. + + :param project_id: str + :param branch_id: str + :param page_size: int (optional) + Upper bound for items returned. If specified must be at least 10. + :param page_token: str (optional) + Pagination token to go to the next page of Database Endpoints. Requests first page if absent. 
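+
+        A minimal pagination sketch (illustrative only; ``w.database_project`` is an
+        assumed accessor name on the workspace client). The returned iterator follows
+        ``next_page_token`` across pages, so no manual token handling is needed:
+
+            for endpoint in w.database_project.list_database_endpoints(
+                project_id="my-project-id", branch_id="my-branch-id", page_size=10
+            ):
+                print(endpoint.as_dict())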
+
+        :returns: Iterator over :class:`DatabaseEndpoint`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints",
+                query=query,
+                headers=headers,
+            )
+            if "database_endpoints" in json:
+                for v in json["database_endpoints"]:
+                    yield DatabaseEndpoint.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def list_database_projects(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[DatabaseProject]:
+        """List Database Projects.
+
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of Database Projects. Requests first page if absent.
+
+        :returns: Iterator over :class:`DatabaseProject`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do("GET", "/api/2.0/database/projects", query=query, headers=headers)
+            if "database_projects" in json:
+                for v in json["database_projects"]:
+                    yield DatabaseProject.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def restart_database_endpoint(self, project_id: str, branch_id: str, endpoint_id: str) -> DatabaseEndpoint:
+        """Restart a Database Endpoint. TODO: should return databricks.longrunning.Operation
+
+        :param project_id: str
+        :param branch_id: str
+        :param endpoint_id: str
+
+        :returns: :class:`DatabaseEndpoint`
+        """
+
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}/restart",
+            headers=headers,
+        )
+        return DatabaseEndpoint.from_dict(res)
+
+    def update_database_branch(
+        self, project_id: str, branch_id: str, database_branch: DatabaseBranch, update_mask: str
+    ) -> DatabaseBranch:
+        """Update a Database Branch.
+
+        :param project_id: str
+        :param branch_id: str
+        :param database_branch: :class:`DatabaseBranch`
+        :param update_mask: str
+          The list of fields to update. If unspecified, all fields will be updated when possible.
+
+        :returns: :class:`DatabaseBranch`
+        """
+        body = database_branch.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/database/projects/{project_id}/branches/{branch_id}",
+            query=query,
+            body=body,
+            headers=headers,
+        )
+        return DatabaseBranch.from_dict(res)
+
+    def update_database_endpoint(
+        self, project_id: str, branch_id: str, endpoint_id: str, database_endpoint: DatabaseEndpoint, update_mask: str
+    ) -> DatabaseEndpoint:
+        """Update a Database Endpoint. TODO: should return databricks.longrunning.Operation
+
+        :param project_id: str
+        :param branch_id: str
+        :param endpoint_id: str
+        :param database_endpoint: :class:`DatabaseEndpoint`
+        :param update_mask: str
+          The list of fields to update.
If unspecified, all fields will be updated when possible. + + :returns: :class:`DatabaseEndpoint` + """ + body = database_endpoint.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/database/projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id}", + query=query, + body=body, + headers=headers, + ) + return DatabaseEndpoint.from_dict(res) + + def update_database_project( + self, project_id: str, database_project: DatabaseProject, update_mask: str + ) -> DatabaseProject: + """Update a Database Project. + + :param project_id: str + :param database_project: :class:`DatabaseProject` + :param update_mask: str + The list of fields to update. If unspecified, all fields will be updated when possible. + + :returns: :class:`DatabaseProject` + """ + body = database_project.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/projects/{project_id}", query=query, body=body, headers=headers) + return DatabaseProject.from_dict(res) diff --git a/databricks/sdk/service/dataquality.py b/databricks/sdk/service/dataquality.py new file mode 100755 index 000000000..59757a3f8 --- /dev/null +++ b/databricks/sdk/service/dataquality.py @@ -0,0 +1,1185 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, Iterator, List, Optional + +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum + +_LOG = logging.getLogger("databricks.sdk") + + +# all definitions in this file are in alphabetical order + + +class AggregationGranularity(Enum): + """The granularity for aggregating data into time windows based on their timestamp.""" + + AGGREGATION_GRANULARITY_1_DAY = "AGGREGATION_GRANULARITY_1_DAY" + AGGREGATION_GRANULARITY_1_HOUR = "AGGREGATION_GRANULARITY_1_HOUR" + AGGREGATION_GRANULARITY_1_MONTH = "AGGREGATION_GRANULARITY_1_MONTH" + AGGREGATION_GRANULARITY_1_WEEK = "AGGREGATION_GRANULARITY_1_WEEK" + AGGREGATION_GRANULARITY_1_YEAR = "AGGREGATION_GRANULARITY_1_YEAR" + AGGREGATION_GRANULARITY_2_WEEKS = "AGGREGATION_GRANULARITY_2_WEEKS" + AGGREGATION_GRANULARITY_30_MINUTES = "AGGREGATION_GRANULARITY_30_MINUTES" + AGGREGATION_GRANULARITY_3_WEEKS = "AGGREGATION_GRANULARITY_3_WEEKS" + AGGREGATION_GRANULARITY_4_WEEKS = "AGGREGATION_GRANULARITY_4_WEEKS" + AGGREGATION_GRANULARITY_5_MINUTES = "AGGREGATION_GRANULARITY_5_MINUTES" + + +@dataclass +class AnomalyDetectionConfig: + """Anomaly Detection Configurations.""" + + anomaly_detection_workflow_id: Optional[int] = None + """The id of the workflow that detects the anomaly. 
This field will only be returned in the
+    Get/Update response, if the request comes from the workspace where this anomaly detection job is
+    created."""
+
+    job_type: Optional[AnomalyDetectionJobType] = None
+    """The type of the last run of the workflow."""
+
+    publish_health_indicator: Optional[bool] = None
+    """If the health indicator should be shown."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.anomaly_detection_workflow_id is not None:
+            body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id
+        if self.job_type is not None:
+            body["job_type"] = self.job_type.value
+        if self.publish_health_indicator is not None:
+            body["publish_health_indicator"] = self.publish_health_indicator
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.anomaly_detection_workflow_id is not None:
+            body["anomaly_detection_workflow_id"] = self.anomaly_detection_workflow_id
+        if self.job_type is not None:
+            body["job_type"] = self.job_type
+        if self.publish_health_indicator is not None:
+            body["publish_health_indicator"] = self.publish_health_indicator
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig:
+        """Deserializes the AnomalyDetectionConfig from a dictionary."""
+        return cls(
+            anomaly_detection_workflow_id=d.get("anomaly_detection_workflow_id", None),
+            job_type=_enum(d, "job_type", AnomalyDetectionJobType),
+            publish_health_indicator=d.get("publish_health_indicator", None),
+        )
+
+
+class AnomalyDetectionJobType(Enum):
+    """Anomaly Detection job type."""
+
+    ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN = "ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN"
+    ANOMALY_DETECTION_JOB_TYPE_NORMAL = "ANOMALY_DETECTION_JOB_TYPE_NORMAL"
+
+
+@dataclass
+class CancelRefreshResponse:
+    """Response to cancelling a refresh."""
+
+    refresh: Optional[Refresh] = None
+    """The refresh to cancel."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.refresh:
+            body["refresh"] = self.refresh.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.refresh:
+            body["refresh"] = self.refresh
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CancelRefreshResponse:
+        """Deserializes the CancelRefreshResponse from a dictionary."""
+        return cls(refresh=_from_dict(d, "refresh", Refresh))
+
+
+@dataclass
+class CronSchedule:
+    """The data quality monitoring workflow cron schedule."""
+
+    quartz_cron_expression: str
+    """The expression that determines when to run the monitor. See [examples].
+
+    [examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html"""
+
+    timezone_id: str
+    """A Java timezone id. The schedule for a job will be resolved with respect to this timezone. See
+    `Java TimeZone <https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html>`_ for details.
+ The timezone id (e.g., ``America/Los_Angeles``) in which to evaluate the quartz expression.""" + + pause_status: Optional[CronSchedulePauseStatus] = None + """Read only field that indicates whether the schedule is paused or not.""" + + def as_dict(self) -> dict: + """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: + """Deserializes the CronSchedule from a dictionary.""" + return cls( + pause_status=_enum(d, "pause_status", CronSchedulePauseStatus), + quartz_cron_expression=d.get("quartz_cron_expression", None), + timezone_id=d.get("timezone_id", None), + ) + + +class CronSchedulePauseStatus(Enum): + """The data quality monitoring workflow cron schedule pause status.""" + + CRON_SCHEDULE_PAUSE_STATUS_PAUSED = "CRON_SCHEDULE_PAUSE_STATUS_PAUSED" + CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED = "CRON_SCHEDULE_PAUSE_STATUS_UNPAUSED" + + +@dataclass +class DataProfilingConfig: + """Data Profiling Configurations.""" + + output_schema_id: str + """ID of the schema where output tables are created.""" + + assets_dir: Optional[str] = None + """Field for specifying the absolute path to a custom directory to store data-monitoring assets. + Normally prepopulated to a default user location via UI and Python APIs.""" + + baseline_table_name: Optional[str] = None + """Baseline table name. Baseline data is used to compute drift from the data in the monitored + `table_name`. The baseline table and the monitored table shall have the same schema.""" + + custom_metrics: Optional[List[DataProfilingCustomMetric]] = None + """Custom metrics.""" + + dashboard_id: Optional[str] = None + """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in + PENDING state.""" + + drift_metrics_table_name: Optional[str] = None + """Table that stores drift metrics data. Format: `catalog.schema.table_name`.""" + + effective_warehouse_id: Optional[str] = None + """The warehouse for dashboard creation""" + + inference_log: Optional[InferenceLogConfig] = None + """Configuration for monitoring inference log tables.""" + + latest_monitor_failure_message: Optional[str] = None + """The latest error message for a monitor failure.""" + + monitor_version: Optional[int] = None + """Represents the current monitor configuration version in use. The version will be represented in + a numeric fashion (1,2,3...). The field has flexibility to take on negative values, which can + indicate corrupted monitor_version numbers.""" + + monitored_table_name: Optional[str] = None + """Unity Catalog table to monitor. 
Format: `catalog.schema.table_name`""" + + notification_settings: Optional[NotificationSettings] = None + """Field for specifying notification settings.""" + + profile_metrics_table_name: Optional[str] = None + """Table that stores profile metrics data. Format: `catalog.schema.table_name`.""" + + schedule: Optional[CronSchedule] = None + """The cron schedule.""" + + skip_builtin_dashboard: Optional[bool] = None + """Whether to skip creating a default dashboard summarizing data quality metrics.""" + + slicing_exprs: Optional[List[str]] = None + """List of column expressions to slice data with for targeted analysis. The data is grouped by each + expression independently, resulting in a separate slice for each predicate and its complements. + For example `slicing_exprs=[“col_1”, “col_2 > 10”]` will generate the following slices: + two slices for `col_2 > 10` (True and False), and one slice per unique value in `col1`. For + high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + + snapshot: Optional[SnapshotConfig] = None + """Configuration for monitoring snapshot tables.""" + + status: Optional[DataProfilingStatus] = None + """The data profiling monitor status.""" + + time_series: Optional[TimeSeriesConfig] = None + """Configuration for monitoring time series tables.""" + + warehouse_id: Optional[str] = None + """Optional argument to specify the warehouse for dashboard creation. If not specified, the first + running warehouse will be used.""" + + def as_dict(self) -> dict: + """Serializes the DataProfilingConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.effective_warehouse_id is not None: + body["effective_warehouse_id"] = self.effective_warehouse_id + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.latest_monitor_failure_message is not None: + body["latest_monitor_failure_message"] = self.latest_monitor_failure_message + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.monitored_table_name is not None: + body["monitored_table_name"] = self.monitored_table_name + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.output_schema_id is not None: + body["output_schema_id"] = self.output_schema_id + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.skip_builtin_dashboard is not None: + body["skip_builtin_dashboard"] = self.skip_builtin_dashboard + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.time_series: + body["time_series"] = self.time_series.as_dict() + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes 
the DataProfilingConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.effective_warehouse_id is not None: + body["effective_warehouse_id"] = self.effective_warehouse_id + if self.inference_log: + body["inference_log"] = self.inference_log + if self.latest_monitor_failure_message is not None: + body["latest_monitor_failure_message"] = self.latest_monitor_failure_message + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.monitored_table_name is not None: + body["monitored_table_name"] = self.monitored_table_name + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.output_schema_id is not None: + body["output_schema_id"] = self.output_schema_id + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule + if self.skip_builtin_dashboard is not None: + body["skip_builtin_dashboard"] = self.skip_builtin_dashboard + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.status is not None: + body["status"] = self.status + if self.time_series: + body["time_series"] = self.time_series + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataProfilingConfig: + """Deserializes the DataProfilingConfig from a dictionary.""" + return cls( + assets_dir=d.get("assets_dir", None), + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", DataProfilingCustomMetric), + dashboard_id=d.get("dashboard_id", None), + drift_metrics_table_name=d.get("drift_metrics_table_name", None), + effective_warehouse_id=d.get("effective_warehouse_id", None), + inference_log=_from_dict(d, "inference_log", InferenceLogConfig), + latest_monitor_failure_message=d.get("latest_monitor_failure_message", None), + monitor_version=d.get("monitor_version", None), + monitored_table_name=d.get("monitored_table_name", None), + notification_settings=_from_dict(d, "notification_settings", NotificationSettings), + output_schema_id=d.get("output_schema_id", None), + profile_metrics_table_name=d.get("profile_metrics_table_name", None), + schedule=_from_dict(d, "schedule", CronSchedule), + skip_builtin_dashboard=d.get("skip_builtin_dashboard", None), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", SnapshotConfig), + status=_enum(d, "status", DataProfilingStatus), + time_series=_from_dict(d, "time_series", TimeSeriesConfig), + warehouse_id=d.get("warehouse_id", None), + ) + + +@dataclass +class DataProfilingCustomMetric: + """Custom metric definition.""" + + name: str + """Name of the metric in the output tables.""" + + definition: str + """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric + definition]. 
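+
+    For example (an illustrative aggregate metric; the metric name, column name, and
+    output type value below are assumptions for the sketch, not prescribed values):
+
+        DataProfilingCustomMetric(
+            name="avg_price",
+            definition="avg(`{{input_column}}`)",
+            input_columns=["price"],
+            output_data_type="double",
+            type=DataProfilingCustomMetricType.DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE,
+        )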
+ + [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" + + input_columns: List[str] + """A list of column names in the input table the metric should be computed for. Can use + ``":table"`` to indicate that the metric needs information from multiple columns.""" + + output_data_type: str + """The output type of the custom metric.""" + + type: DataProfilingCustomMetricType + """The type of the custom metric.""" + + def as_dict(self) -> dict: + """Serializes the DataProfilingCustomMetric into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = [v for v in self.input_columns] + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataProfilingCustomMetric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = self.input_columns + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataProfilingCustomMetric: + """Deserializes the DataProfilingCustomMetric from a dictionary.""" + return cls( + definition=d.get("definition", None), + input_columns=d.get("input_columns", None), + name=d.get("name", None), + output_data_type=d.get("output_data_type", None), + type=_enum(d, "type", DataProfilingCustomMetricType), + ) + + +class DataProfilingCustomMetricType(Enum): + """The custom metric type.""" + + DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE = "DATA_PROFILING_CUSTOM_METRIC_TYPE_AGGREGATE" + DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED = "DATA_PROFILING_CUSTOM_METRIC_TYPE_DERIVED" + DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT = "DATA_PROFILING_CUSTOM_METRIC_TYPE_DRIFT" + + +class DataProfilingStatus(Enum): + """The status of the data profiling monitor.""" + + DATA_PROFILING_STATUS_ACTIVE = "DATA_PROFILING_STATUS_ACTIVE" + DATA_PROFILING_STATUS_DELETE_PENDING = "DATA_PROFILING_STATUS_DELETE_PENDING" + DATA_PROFILING_STATUS_ERROR = "DATA_PROFILING_STATUS_ERROR" + DATA_PROFILING_STATUS_FAILED = "DATA_PROFILING_STATUS_FAILED" + DATA_PROFILING_STATUS_PENDING = "DATA_PROFILING_STATUS_PENDING" + + +@dataclass +class InferenceLogConfig: + """Inference log configuration.""" + + problem_type: InferenceProblemType + """Problem type the model aims to solve.""" + + timestamp_column: str + """Column for the timestamp.""" + + granularities: List[AggregationGranularity] + """List of granularities to use when aggregating data into time windows based on their timestamp.""" + + prediction_column: str + """Column for the prediction.""" + + model_id_column: str + """Column for the model identifier.""" + + label_column: Optional[str] = None + """Column for the label.""" + + prediction_probability_column: Optional[str] = None + """Column for prediction probabilities""" + + def as_dict(self) -> dict: + """Serializes the InferenceLogConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.granularities: + 
body["granularities"] = [v.value for v in self.granularities] + if self.label_column is not None: + body["label_column"] = self.label_column + if self.model_id_column is not None: + body["model_id_column"] = self.model_id_column + if self.prediction_column is not None: + body["prediction_column"] = self.prediction_column + if self.prediction_probability_column is not None: + body["prediction_probability_column"] = self.prediction_probability_column + if self.problem_type is not None: + body["problem_type"] = self.problem_type.value + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the InferenceLogConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: + body["granularities"] = self.granularities + if self.label_column is not None: + body["label_column"] = self.label_column + if self.model_id_column is not None: + body["model_id_column"] = self.model_id_column + if self.prediction_column is not None: + body["prediction_column"] = self.prediction_column + if self.prediction_probability_column is not None: + body["prediction_probability_column"] = self.prediction_probability_column + if self.problem_type is not None: + body["problem_type"] = self.problem_type + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> InferenceLogConfig: + """Deserializes the InferenceLogConfig from a dictionary.""" + return cls( + granularities=_repeated_enum(d, "granularities", AggregationGranularity), + label_column=d.get("label_column", None), + model_id_column=d.get("model_id_column", None), + prediction_column=d.get("prediction_column", None), + prediction_probability_column=d.get("prediction_probability_column", None), + problem_type=_enum(d, "problem_type", InferenceProblemType), + timestamp_column=d.get("timestamp_column", None), + ) + + +class InferenceProblemType(Enum): + """Inference problem type the model aims to solve.""" + + INFERENCE_PROBLEM_TYPE_CLASSIFICATION = "INFERENCE_PROBLEM_TYPE_CLASSIFICATION" + INFERENCE_PROBLEM_TYPE_REGRESSION = "INFERENCE_PROBLEM_TYPE_REGRESSION" + + +@dataclass +class ListMonitorResponse: + """Response for listing Monitors.""" + + monitors: Optional[List[Monitor]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListMonitorResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.monitors: + body["monitors"] = [v.as_dict() for v in self.monitors] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListMonitorResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.monitors: + body["monitors"] = self.monitors + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListMonitorResponse: + """Deserializes the ListMonitorResponse from a dictionary.""" + return cls(monitors=_repeated_dict(d, "monitors", Monitor), next_page_token=d.get("next_page_token", None)) + + +@dataclass +class ListRefreshResponse: + """Response for listing refreshes.""" + + next_page_token: Optional[str] = None + + refreshes: Optional[List[Refresh]] = None + + def as_dict(self) -> dict: + """Serializes the 
ListRefreshResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.refreshes: + body["refreshes"] = [v.as_dict() for v in self.refreshes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListRefreshResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.refreshes: + body["refreshes"] = self.refreshes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListRefreshResponse: + """Deserializes the ListRefreshResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), refreshes=_repeated_dict(d, "refreshes", Refresh)) + + +@dataclass +class Monitor: + """Monitor for the data quality of unity catalog entities such as schema or table.""" + + object_type: str + """The type of the monitored object. Can be one of the following: schema or table.""" + + object_id: str + """The UUID of the request object. For example, schema id.""" + + anomaly_detection_config: Optional[AnomalyDetectionConfig] = None + """Anomaly Detection Configuration, applicable to `schema` object types.""" + + data_profiling_config: Optional[DataProfilingConfig] = None + """Data Profiling Configuration, applicable to `table` object types""" + + def as_dict(self) -> dict: + """Serializes the Monitor into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config.as_dict() + if self.data_profiling_config: + body["data_profiling_config"] = self.data_profiling_config.as_dict() + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Monitor into a shallow dictionary of its immediate attributes.""" + body = {} + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config + if self.data_profiling_config: + body["data_profiling_config"] = self.data_profiling_config + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Monitor: + """Deserializes the Monitor from a dictionary.""" + return cls( + anomaly_detection_config=_from_dict(d, "anomaly_detection_config", AnomalyDetectionConfig), + data_profiling_config=_from_dict(d, "data_profiling_config", DataProfilingConfig), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) + + +@dataclass +class NotificationDestination: + """Destination of the data quality monitoring notification.""" + + email_addresses: Optional[List[str]] = None + """The list of email addresses to send the notification to. 
A maximum of 5 email addresses is + supported.""" + + def as_dict(self) -> dict: + """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email_addresses: + body["email_addresses"] = [v for v in self.email_addresses] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email_addresses: + body["email_addresses"] = self.email_addresses + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NotificationDestination: + """Deserializes the NotificationDestination from a dictionary.""" + return cls(email_addresses=d.get("email_addresses", None)) + + +@dataclass +class NotificationSettings: + """Settings for sending notifications on the data quality monitoring.""" + + on_failure: Optional[NotificationDestination] = None + """Destinations to send notifications on failure/timeout.""" + + def as_dict(self) -> dict: + """Serializes the NotificationSettings into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.on_failure: + body["on_failure"] = self.on_failure.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NotificationSettings into a shallow dictionary of its immediate attributes.""" + body = {} + if self.on_failure: + body["on_failure"] = self.on_failure + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NotificationSettings: + """Deserializes the NotificationSettings from a dictionary.""" + return cls(on_failure=_from_dict(d, "on_failure", NotificationDestination)) + + +@dataclass +class Refresh: + """The Refresh object gives information on a refresh of the data quality monitoring pipeline.""" + + object_type: str + """The type of the monitored object. Can be one of the following: table.""" + + object_id: str + """The UUID of the request object. For example, table id.""" + + end_time_ms: Optional[int] = None + """Time when the refresh ended (milliseconds since 1/1/1970 UTC).""" + + message: Optional[str] = None + """An optional message to give insight into the current state of the refresh (e.g. 
FAILURE + messages).""" + + refresh_id: Optional[int] = None + """Unique id of the refresh operation.""" + + start_time_ms: Optional[int] = None + """Time when the refresh started (milliseconds since 1/1/1970 UTC).""" + + state: Optional[RefreshState] = None + """The current state of the refresh.""" + + trigger: Optional[RefreshTrigger] = None + """What triggered the refresh.""" + + def as_dict(self) -> dict: + """Serializes the Refresh into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state.value + if self.trigger is not None: + body["trigger"] = self.trigger.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Refresh into a shallow dictionary of its immediate attributes.""" + body = {} + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state + if self.trigger is not None: + body["trigger"] = self.trigger + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Refresh: + """Deserializes the Refresh from a dictionary.""" + return cls( + end_time_ms=d.get("end_time_ms", None), + message=d.get("message", None), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + refresh_id=d.get("refresh_id", None), + start_time_ms=d.get("start_time_ms", None), + state=_enum(d, "state", RefreshState), + trigger=_enum(d, "trigger", RefreshTrigger), + ) + + +class RefreshState(Enum): + """The state of the refresh.""" + + MONITOR_REFRESH_STATE_CANCELED = "MONITOR_REFRESH_STATE_CANCELED" + MONITOR_REFRESH_STATE_FAILED = "MONITOR_REFRESH_STATE_FAILED" + MONITOR_REFRESH_STATE_PENDING = "MONITOR_REFRESH_STATE_PENDING" + MONITOR_REFRESH_STATE_RUNNING = "MONITOR_REFRESH_STATE_RUNNING" + MONITOR_REFRESH_STATE_SUCCESS = "MONITOR_REFRESH_STATE_SUCCESS" + MONITOR_REFRESH_STATE_UNKNOWN = "MONITOR_REFRESH_STATE_UNKNOWN" + + +class RefreshTrigger(Enum): + """The trigger of the refresh.""" + + MONITOR_REFRESH_TRIGGER_DATA_CHANGE = "MONITOR_REFRESH_TRIGGER_DATA_CHANGE" + MONITOR_REFRESH_TRIGGER_MANUAL = "MONITOR_REFRESH_TRIGGER_MANUAL" + MONITOR_REFRESH_TRIGGER_SCHEDULE = "MONITOR_REFRESH_TRIGGER_SCHEDULE" + MONITOR_REFRESH_TRIGGER_UNKNOWN = "MONITOR_REFRESH_TRIGGER_UNKNOWN" + + +@dataclass +class SnapshotConfig: + """Snapshot analysis configuration.""" + + def as_dict(self) -> dict: + """Serializes the SnapshotConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SnapshotConfig into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, 
Any]) -> SnapshotConfig: + """Deserializes the SnapshotConfig from a dictionary.""" + return cls() + + +@dataclass +class TimeSeriesConfig: + """Time series analysis configuration.""" + + timestamp_column: str + """Column for the timestamp.""" + + granularities: List[AggregationGranularity] + """List of granularities to use when aggregating data into time windows based on their timestamp.""" + + def as_dict(self) -> dict: + """Serializes the TimeSeriesConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.granularities: + body["granularities"] = [v.value for v in self.granularities] + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TimeSeriesConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.granularities: + body["granularities"] = self.granularities + if self.timestamp_column is not None: + body["timestamp_column"] = self.timestamp_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TimeSeriesConfig: + """Deserializes the TimeSeriesConfig from a dictionary.""" + return cls( + granularities=_repeated_enum(d, "granularities", AggregationGranularity), + timestamp_column=d.get("timestamp_column", None), + ) + + +class DataQualityAPI: + """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)""" + + def __init__(self, api_client): + self._api = api_client + + def cancel_refresh(self, object_type: str, object_id: str, refresh_id: int) -> CancelRefreshResponse: + """Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + :returns: :class:`CancelRefreshResponse` + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}/cancel", + headers=headers, + ) + return CancelRefreshResponse.from_dict(res) + + def create_monitor(self, monitor: Monitor) -> Monitor: + """Create a data quality monitor on a Unity Catalog object. The caller must provide either + `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog, + have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have + **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have + **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's + parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. + + :param monitor: :class:`Monitor` + The monitor to create. 
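+
+        A minimal sketch of a time-series table monitor (illustrative IDs;
+        ``w.data_quality`` is an assumed accessor name on the workspace client, and
+        the types are importable from ``databricks.sdk.service.dataquality``):
+
+            monitor = w.data_quality.create_monitor(
+                Monitor(
+                    object_type="table",
+                    object_id="<table-uuid>",
+                    data_profiling_config=DataProfilingConfig(
+                        output_schema_id="<schema-uuid>",
+                        time_series=TimeSeriesConfig(
+                            timestamp_column="event_ts",
+                            granularities=[AggregationGranularity.AGGREGATION_GRANULARITY_1_DAY],
+                        ),
+                    ),
+                )
+            )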
+ + :returns: :class:`Monitor` + """ + body = monitor.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/data-quality/v1/monitors", body=body, headers=headers) + return Monitor.from_dict(res) + + def create_refresh(self, object_type: str, object_id: str, refresh: Refresh) -> Refresh: + """Creates a refresh. Currently only supported for the `table` `object_type`. + + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the + table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: + - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an + owner of the table + + :param object_type: str + The type of the monitored object. Can be one of the following: table. + :param object_id: str + The UUID of the request object. For example, table id. + :param refresh: :class:`Refresh` + The refresh to create + + :returns: :class:`Refresh` + """ + body = refresh.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes", body=body, headers=headers + ) + return Refresh.from_dict(res) + + def delete_monitor(self, object_type: str, object_id: str): + """Delete a data quality monitor on Unity Catalog object. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - be an owner of the table. + + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must + be manually cleaned up (if desired). + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", headers=headers) + + def delete_refresh(self, object_type: str, object_id: str, refresh_id: int): + """(Unimplemented) Delete a refresh + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", headers=headers + ) + + def get_monitor(self, object_type: str, object_id: str) -> Monitor: + """Read a data quality monitor on Unity Catalog object. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + The returned information includes configuration values, as well as information on assets created by + the monitor. 
Some information (e.g., dashboard) may be filtered out if the caller is in a different + workspace than where the monitor was created. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + + :returns: :class:`Monitor` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", headers=headers) + return Monitor.from_dict(res) + + def get_refresh(self, object_type: str, object_id: str, refresh_id: int) -> Refresh: + """Get data quality monitor refresh. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. + :param refresh_id: int + Unique id of the refresh operation. + + :returns: :class:`Refresh` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", headers=headers + ) + return Refresh.from_dict(res) + + def list_monitor(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Monitor]: + """(Unimplemented) List data quality monitors. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Monitor` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/data-quality/v1/monitors", query=query, headers=headers) + if "monitors" in json: + for v in json["monitors"]: + yield Monitor.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_refresh( + self, object_type: str, object_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[Refresh]: + """List data quality monitor refreshes. + + For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2. + have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. + have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on + the table's parent schema - **SELECT** privilege on the table. + + :param object_type: str + The type of the monitored object. Can be one of the following: schema or table. + :param object_id: str + The UUID of the request object. For example, schema id. 
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`Refresh`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do(
+                "GET",
+                f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes",
+                query=query,
+                headers=headers,
+            )
+            if "refreshes" in json:
+                for v in json["refreshes"]:
+                    yield Refresh.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def update_monitor(self, object_type: str, object_id: str, monitor: Monitor, update_mask: str) -> Monitor:
+        """Update a data quality monitor on Unity Catalog object.
+
+        For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent catalog 2.
+        have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3.
+        have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
+        the table's parent schema - be an owner of the table.
+
+        :param object_type: str
+          The type of the monitored object. Can be one of the following: schema or table.
+        :param object_id: str
+          The UUID of the request object. For example, schema id.
+        :param monitor: :class:`Monitor`
+          The monitor to update.
+        :param update_mask: str
+          The field mask to specify which fields to update.
+
+        :returns: :class:`Monitor`
+        """
+        body = monitor.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH", f"/api/data-quality/v1/monitors/{object_type}/{object_id}", query=query, body=body, headers=headers
+        )
+        return Monitor.from_dict(res)
+
+    def update_refresh(
+        self, object_type: str, object_id: str, refresh_id: int, refresh: Refresh, update_mask: str
+    ) -> Refresh:
+        """(Unimplemented) Update a refresh
+
+        :param object_type: str
+          The type of the monitored object. Can be one of the following: schema or table.
+        :param object_id: str
+          The UUID of the request object. For example, schema id.
+        :param refresh_id: int
+          Unique id of the refresh operation.
+        :param refresh: :class:`Refresh`
+          The refresh to update.
+        :param update_mask: str
+          The field mask to specify which fields to update.
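+
+        A call-shape sketch only (this endpoint is marked unimplemented above, and the
+        values plus the ``w.data_quality`` accessor name are assumptions):
+
+            w.data_quality.update_refresh(
+                object_type="table",
+                object_id="<table-uuid>",
+                refresh_id=42,
+                refresh=Refresh(object_type="table", object_id="<table-uuid>", message="retrying"),
+                update_mask="message",
+            )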
+ + :returns: :class:`Refresh` + """ + body = refresh.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/data-quality/v1/monitors/{object_type}/{object_id}/refreshes/{refresh_id}", + query=query, + body=body, + headers=headers, + ) + return Refresh.from_dict(res) diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 09166b04f..a470d7544 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -124,6 +124,244 @@ def from_dict(cls, d: Dict[str, Any]) -> AccessControlResponse: ) +@dataclass +class AccountGroup: + account_id: Optional[str] = None + """Databricks account ID""" + + display_name: Optional[str] = None + """String that represents a human-readable group name""" + + external_id: Optional[str] = None + """external_id should be unique for identifying groups""" + + id: Optional[str] = None + """Databricks group ID""" + + members: Optional[List[ComplexValue]] = None + + meta: Optional[ResourceMeta] = None + """Container for the group identifier. Workspace local versus account.""" + + roles: Optional[List[ComplexValue]] = None + """Indicates if the group has the admin role.""" + + def as_dict(self) -> dict: + """Serializes the AccountGroup into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.members: + body["members"] = [v.as_dict() for v in self.members] + if self.meta: + body["meta"] = self.meta.as_dict() + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountGroup into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.members: + body["members"] = self.members + if self.meta: + body["meta"] = self.meta + if self.roles: + body["roles"] = self.roles + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountGroup: + """Deserializes the AccountGroup from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + display_name=d.get("displayName", None), + external_id=d.get("externalId", None), + id=d.get("id", None), + members=_repeated_dict(d, "members", ComplexValue), + meta=_from_dict(d, "meta", ResourceMeta), + roles=_repeated_dict(d, "roles", ComplexValue), + ) + + +@dataclass +class AccountServicePrincipal: + account_id: Optional[str] = None + """Databricks account ID""" + + active: Optional[bool] = None + """If this user is active""" + + application_id: Optional[str] = None + """UUID relating to the service principal""" + + display_name: Optional[str] = None + """String that represents a concatenation of given and family names.""" + + external_id: Optional[str] = None + + id: Optional[str] = None + """Databricks service principal ID.""" + + roles: Optional[List[ComplexValue]] = None + """Indicates if the group has the admin role.""" + + def 
as_dict(self) -> dict: + """Serializes the AccountServicePrincipal into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.active is not None: + body["active"] = self.active + if self.application_id is not None: + body["applicationId"] = self.application_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountServicePrincipal into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.active is not None: + body["active"] = self.active + if self.application_id is not None: + body["applicationId"] = self.application_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.roles: + body["roles"] = self.roles + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountServicePrincipal: + """Deserializes the AccountServicePrincipal from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + active=d.get("active", None), + application_id=d.get("applicationId", None), + display_name=d.get("displayName", None), + external_id=d.get("externalId", None), + id=d.get("id", None), + roles=_repeated_dict(d, "roles", ComplexValue), + ) + + +@dataclass +class AccountUser: + account_id: Optional[str] = None + """Databricks account ID""" + + active: Optional[bool] = None + """If this user is active""" + + display_name: Optional[str] = None + """String that represents a concatenation of given and family names. For example `John Smith`.""" + + emails: Optional[List[ComplexValue]] = None + """All the emails associated with the Databricks user.""" + + external_id: Optional[str] = None + """External ID is not currently supported. 
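Note that these generated dataclasses map snake_case attributes to SCIM's camelCase wire names (`displayName`, `externalId`); a quick round-trip sketch of the new `AccountServicePrincipal` makes the mapping concrete (all values are placeholders):

```python
from databricks.sdk.service.iam import AccountServicePrincipal

sp = AccountServicePrincipal(
    account_id="123456789",
    active=True,
    application_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    display_name="ci-bot",
)
wire = sp.as_dict()
assert wire["displayName"] == "ci-bot"  # camelCase on the wire
assert AccountServicePrincipal.from_dict(wire) == sp  # lossless round-trip
```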
It is reserved for future use.""" + + id: Optional[str] = None + """Databricks user ID.""" + + name: Optional[Name] = None + + roles: Optional[List[ComplexValue]] = None + """Indicates if the group has the admin role.""" + + user_name: Optional[str] = None + """Email address of the Databricks user.""" + + def as_dict(self) -> dict: + """Serializes the AccountUser into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.active is not None: + body["active"] = self.active + if self.display_name is not None: + body["displayName"] = self.display_name + if self.emails: + body["emails"] = [v.as_dict() for v in self.emails] + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.name: + body["name"] = self.name.as_dict() + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + if self.user_name is not None: + body["userName"] = self.user_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AccountUser into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: + body["account_id"] = self.account_id + if self.active is not None: + body["active"] = self.active + if self.display_name is not None: + body["displayName"] = self.display_name + if self.emails: + body["emails"] = self.emails + if self.external_id is not None: + body["externalId"] = self.external_id + if self.id is not None: + body["id"] = self.id + if self.name: + body["name"] = self.name + if self.roles: + body["roles"] = self.roles + if self.user_name is not None: + body["userName"] = self.user_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AccountUser: + """Deserializes the AccountUser from a dictionary.""" + return cls( + account_id=d.get("account_id", None), + active=d.get("active", None), + display_name=d.get("displayName", None), + emails=_repeated_dict(d, "emails", ComplexValue), + external_id=d.get("externalId", None), + id=d.get("id", None), + name=_from_dict(d, "name", Name), + roles=_repeated_dict(d, "roles", ComplexValue), + user_name=d.get("userName", None), + ) + + @dataclass class Actor: """represents an identity trying to access a resource - user or a service principal group can be a @@ -425,6 +663,7 @@ class Group: [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" external_id: Optional[str] = None + """external_id should be unique for identifying groups""" groups: Optional[List[ComplexValue]] = None @@ -510,16 +749,13 @@ class GroupSchema(Enum): @dataclass -class ListGroupsResponse: +class ListAccountGroupsResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[Group]] = None + resources: Optional[List[AccountGroup]] = None """User objects returned in the response.""" - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the service principal.""" - start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -527,14 +763,12 @@ class ListGroupsResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListAccountGroupsResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -542,14 +776,12 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListAccountGroupsResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -557,33 +789,24 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: - """Deserializes the ListGroupsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListAccountGroupsResponse: + """Deserializes the ListAccountGroupsResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", Group), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), + resources=_repeated_dict(d, "Resources", AccountGroup), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) -class ListResponseSchema(Enum): - - URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse" - - @dataclass -class ListServicePrincipalResponse: +class ListAccountServicePrincipalsResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[ServicePrincipal]] = None + resources: Optional[List[AccountServicePrincipal]] = None """User objects returned in the response.""" - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the List response.""" - start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -591,14 +814,12 @@ class ListServicePrincipalResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListAccountServicePrincipalsResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -606,14 +827,12 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListAccountServicePrincipalsResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -621,34 +840,24 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse: - """Deserializes the ListServicePrincipalResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListAccountServicePrincipalsResponse: + """Deserializes the ListAccountServicePrincipalsResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", ServicePrincipal), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), + resources=_repeated_dict(d, "Resources", AccountServicePrincipal), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) -class ListSortOrder(Enum): - - ASCENDING = "ascending" - DESCENDING = "descending" - - @dataclass -class ListUsersResponse: +class ListAccountUsersResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - resources: Optional[List[User]] = None + resources: Optional[List[AccountUser]] = None """User objects returned in the response.""" - schemas: Optional[List[ListResponseSchema]] = None - """The schema of the List response.""" - start_index: Optional[int] = None """Starting index of all the results that matched the request filters. 
First item is number 1.""" @@ -656,14 +865,12 @@ class ListUsersResponse: """Total results that match the request filters.""" def as_dict(self) -> dict: - """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListAccountUsersResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = [v.as_dict() for v in self.resources] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -671,14 +878,12 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListAccountUsersResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.items_per_page is not None: body["itemsPerPage"] = self.items_per_page if self.resources: body["Resources"] = self.resources - if self.schemas: - body["schemas"] = self.schemas if self.start_index is not None: body["startIndex"] = self.start_index if self.total_results is not None: @@ -686,21 +891,208 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: - """Deserializes the ListUsersResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListAccountUsersResponse: + """Deserializes the ListAccountUsersResponse from a dictionary.""" return cls( items_per_page=d.get("itemsPerPage", None), - resources=_repeated_dict(d, "Resources", User), - schemas=_repeated_enum(d, "schemas", ListResponseSchema), + resources=_repeated_dict(d, "Resources", AccountUser), start_index=d.get("startIndex", None), total_results=d.get("totalResults", None), ) @dataclass -class MigratePermissionsResponse: - permissions_migrated: Optional[int] = None - """Number of permissions migrated.""" +class ListGroupsResponse: + items_per_page: Optional[int] = None + """Total results returned in the response.""" + + resources: Optional[List[Group]] = None + """User objects returned in the response.""" + + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the service principal.""" + + start_index: Optional[int] = None + """Starting index of all the results that matched the request filters. 
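These SCIM list envelopes page by index rather than token: the next `startIndex` is the previous one plus the number of `Resources` returned, and iteration ends on an empty page (exactly what the generated `list()` methods further down do). A standalone sketch of that arithmetic:

```python
from typing import Optional

from databricks.sdk.service.iam import ListGroupsResponse


def next_start_index(resp: ListGroupsResponse, current_start: int) -> Optional[int]:
    """Return the startIndex for the next page, or None when exhausted."""
    page = resp.resources or []
    if not page:
        return None
    return current_start + len(page)
```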
First item is number 1.""" + + total_results: Optional[int] = None + """Total results that match the request filters.""" + + def as_dict(self) -> dict: + """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: + """Deserializes the ListGroupsResponse from a dictionary.""" + return cls( + items_per_page=d.get("itemsPerPage", None), + resources=_repeated_dict(d, "Resources", Group), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) + + +class ListResponseSchema(Enum): + + URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse" + + +@dataclass +class ListServicePrincipalResponse: + items_per_page: Optional[int] = None + """Total results returned in the response.""" + + resources: Optional[List[ServicePrincipal]] = None + """User objects returned in the response.""" + + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the List response.""" + + start_index: Optional[int] = None + """Starting index of all the results that matched the request filters. 
First item is number 1.""" + + total_results: Optional[int] = None + """Total results that match the request filters.""" + + def as_dict(self) -> dict: + """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse: + """Deserializes the ListServicePrincipalResponse from a dictionary.""" + return cls( + items_per_page=d.get("itemsPerPage", None), + resources=_repeated_dict(d, "Resources", ServicePrincipal), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) + + +class ListSortOrder(Enum): + + ASCENDING = "ascending" + DESCENDING = "descending" + + +@dataclass +class ListUsersResponse: + items_per_page: Optional[int] = None + """Total results returned in the response.""" + + resources: Optional[List[User]] = None + """User objects returned in the response.""" + + schemas: Optional[List[ListResponseSchema]] = None + """The schema of the List response.""" + + start_index: Optional[int] = None + """Starting index of all the results that matched the request filters. 
First item is number 1.""" + + total_results: Optional[int] = None + """Total results that match the request filters.""" + + def as_dict(self) -> dict: + """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: + """Deserializes the ListUsersResponse from a dictionary.""" + return cls( + items_per_page=d.get("itemsPerPage", None), + resources=_repeated_dict(d, "Resources", User), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) + + +@dataclass +class MigratePermissionsResponse: + permissions_migrated: Optional[int] = None + """Number of permissions migrated.""" def as_dict(self) -> dict: """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body.""" @@ -1647,24 +2039,6 @@ class ServicePrincipalSchema(Enum): URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - @dataclass class User: active: Optional[bool] = None @@ -1886,7 +2260,8 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service - principal. + principal. `resource=accounts//tagPolicies/` | A resource name for the + tag policy. :returns: :class:`GetAssignableRolesForResourceResponse` """ @@ -1918,6 +2293,8 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: set on the group. `name=accounts//servicePrincipals//ruleSets/default` | A name for a rule set on the service principal. + `name=accounts//tagPolicies//ruleSets/default` | A name for a rule set on + the tag policy. :param etag: str Etag used for versioning. The response is at least as fresh as the eTag provided. 
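Continuing the etag contract described here, a read-modify-write sketch against the rule-set endpoints (assumptions: `access_control` is a constructed instance of this service; the role and principal strings are illustrative; an empty etag fetches the latest version):

```python
from databricks.sdk.service.iam import GrantRule, RuleSetUpdateRequest

name = f"accounts/{account_id}/groups/{group_id}/ruleSets/default"  # placeholder ids
current = access_control.get_rule_set(name=name, etag="")  # "" -> latest version
rules = list(current.grant_rules or [])
rules.append(GrantRule(role="roles/group.manager", principals=[f"users/{user_name}"]))
access_control.update_rule_set(
    name=name,
    rule_set=RuleSetUpdateRequest(name=name, grant_rules=rules, etag=current.etag),
)
# On a concurrent-update conflict, re-read to obtain a fresh etag and retry.
```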
Etag is used for optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from @@ -1997,7 +2374,8 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service - principal. + principal. `resource=accounts//tagPolicies/` | A resource name for the + tag policy. :returns: :class:`GetAssignableRolesForResourceResponse` """ @@ -2026,6 +2404,8 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: set on the group. `name=accounts//servicePrincipals//ruleSets/default` | A name for a rule set on the service principal. + `name=accounts//tagPolicies//ruleSets/default` | A name for a rule set on + the tag policy. :param etag: str Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from @@ -2077,6 +2457,2006 @@ def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetR return RuleSetResponse.from_dict(res) +class AccountGroupsV2API: + """Groups simplify identity management, making it easier to assign access to Databricks account, data, and + other securable objects. + + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, + instead of to users individually. All Databricks account identities can be assigned as members of groups, + and members inherit permissions that are assigned to their group.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + display_name: Optional[str] = None, + external_id: Optional[str] = None, + id: Optional[str] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + ) -> AccountGroup: + """Creates a group in the Databricks account with a unique name, using the supplied group details. + + :param display_name: str (optional) + String that represents a human-readable group name + :param external_id: str (optional) + :param id: str (optional) + Databricks group ID + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. + + :returns: :class:`AccountGroup` + """ + body = {} + if display_name is not None: + body["displayName"] = display_name + if external_id is not None: + body["externalId"] = external_id + if id is not None: + body["id"] = id + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", body=body, headers=headers + ) + return AccountGroup.from_dict(res) + + def delete(self, id: str): + """Deletes a group from the Databricks account. + + :param id: str + Unique ID for a group in the Databricks account. 
+ + + """ + + headers = {} + + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) + + def get(self, id: str) -> AccountGroup: + """Gets the information for a specific group in the Databricks account. + + :param id: str + Unique ID for a group in the Databricks account. + + :returns: :class:`AccountGroup` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) + return AccountGroup.from_dict(res) + + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[AccountGroup]: + """Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this endpoint + will not return members. Instead, members should be retrieved by iterating through `Get group + details`. + + :param attributes: str (optional) + Comma-separated list of attributes to return in response. + :param count: int (optional) + Desired number of results per page. Default is 10000. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. + :param sort_order: :class:`ListSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. + + :returns: Iterator over :class:`AccountGroup` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", query=query, headers=headers + ) + if "Resources" in json: + for v in json["Resources"]: + yield AccountGroup.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a group. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
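For instance, the PATCH contract spelled out above can add a member to an account group like this (a sketch: `Patch`, `PatchOp`, and `PatchSchema` are the existing helper types in this module; `account_groups` and the ids are placeholders):

```python
from databricks.sdk.service.iam import Patch, PatchOp, PatchSchema

account_groups.patch(
    id=group_id,  # placeholder group id
    operations=[Patch(op=PatchOp.ADD, path="members", value=[{"value": user_id}])],
    schemas=[PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
```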
+ + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers + ) + + def update( + self, + id: str, + *, + display_name: Optional[str] = None, + external_id: Optional[str] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + ): + """Updates the details of a group by replacing the entire group entity. + + :param id: str + Databricks group ID + :param display_name: str (optional) + String that represents a human-readable group name + :param external_id: str (optional) + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. + + + """ + body = {} + if display_name is not None: + body["displayName"] = display_name + if external_id is not None: + body["externalId"] = external_id + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers) + + +class AccountServicePrincipalsV2API: + """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. + Databricks recommends creating service principals to run production jobs or modify production data. If all + processes that act on production data run with service principals, interactive users do not need any + write, delete, or modify privileges in production. This eliminates the risk of a user overwriting + production data by accident.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + external_id: Optional[str] = None, + id: Optional[str] = None, + roles: Optional[List[ComplexValue]] = None, + ) -> AccountServicePrincipal: + """Creates a new service principal in the Databricks account. + + :param active: bool (optional) + If this user is active + :param application_id: str (optional) + UUID relating to the service principal + :param display_name: str (optional) + String that represents a concatenation of given and family names. + :param external_id: str (optional) + :param id: str (optional) + Databricks service principal ID. + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. 
+
+        :returns: :class:`AccountServicePrincipal`
+        """
+        body = {}
+        if active is not None:
+            body["active"] = active
+        if application_id is not None:
+            body["applicationId"] = application_id
+        if display_name is not None:
+            body["displayName"] = display_name
+        if external_id is not None:
+            body["externalId"] = external_id
+        if id is not None:
+            body["id"] = id
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", body=body, headers=headers
+        )
+        return AccountServicePrincipal.from_dict(res)
+
+    def delete(self, id: str):
+        """Delete a single service principal in the Databricks account.
+
+        :param id: str
+          Unique ID for a service principal in the Databricks account.
+
+
+        """
+
+        headers = {}
+
+        self._api.do(
+            "DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers
+        )
+
+    def get(self, id: str) -> AccountServicePrincipal:
+        """Gets the details for a single service principal defined in the Databricks account.
+
+        :param id: str
+          Unique ID for a service principal in the Databricks account.
+
+        :returns: :class:`AccountServicePrincipal`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers
+        )
+        return AccountServicePrincipal.from_dict(res)
+
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[AccountServicePrincipal]:
+        """Gets the set of service principals associated with a Databricks account.
+
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page. Default is 10000.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
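The `filter` grammar above only supports simple expressions; a typical listing sketch (placeholder client instance `service_principals`):

```python
# Enumerate service principals whose display name starts with "ci-",
# trimming the response to two attributes.
for sp in service_principals.list(
    filter='displayName sw "ci-"',
    attributes="id,displayName",
):
    print(sp.id, sp.display_name)
```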
+ + :returns: Iterator over :class:`AccountServicePrincipal` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", + query=query, + headers=headers, + ) + if "Resources" in json: + for v in json["Resources"]: + yield AccountServicePrincipal.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a single service principal in the Databricks account. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", + body=body, + headers=headers, + ) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + external_id: Optional[str] = None, + roles: Optional[List[ComplexValue]] = None, + ): + """Updates the details of a single service principal. + + This action replaces the existing service principal with the same name. + + :param id: str + Databricks service principal ID. + :param active: bool (optional) + If this user is active + :param application_id: str (optional) + UUID relating to the service principal + :param display_name: str (optional) + String that represents a concatenation of given and family names. + :param external_id: str (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. + + + """ + body = {} + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if external_id is not None: + body["externalId"] = external_id + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", + body=body, + headers=headers, + ) + + +class AccountUsersV2API: + """User identities recognized by Databricks and represented by email addresses. 
+ + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity + provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your + identity provider to create users and groups in Databricks account and give them the proper level of + access. When a user leaves your organization or no longer needs access to Databricks account, admins can + terminate the user in your identity provider and that user’s account will also be removed from + Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from + accessing sensitive data.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + id: Optional[str] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + user_name: Optional[str] = None, + ) -> AccountUser: + """Creates a new user in the Databricks account. This new user will also be added to the Databricks + account. + + :param active: bool (optional) + If this user is active + :param display_name: str (optional) + String that represents a concatenation of given and family names. For example `John Smith`. + :param emails: List[:class:`ComplexValue`] (optional) + All the emails associated with the Databricks user. + :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. + :param id: str (optional) + Databricks user ID. + :param name: :class:`Name` (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. + :param user_name: str (optional) + Email address of the Databricks user. + + :returns: :class:`AccountUser` + """ + body = {} + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if external_id is not None: + body["externalId"] = external_id + if id is not None: + body["id"] = id + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", body=body, headers=headers + ) + return AccountUser.from_dict(res) + + def delete(self, id: str): + """Deletes a user. Deleting a user from a Databricks account also removes objects associated with the + user. + + :param id: str + Unique ID for a user in the Databricks account. + + + """ + + headers = {} + + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", headers=headers) + + def get( + self, + id: str, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[GetSortOrder] = None, + start_index: Optional[int] = None, + ) -> AccountUser: + """Gets information for a specific user in Databricks account. + + :param id: str + Unique ID for a user in the Databricks account. + :param attributes: str (optional) + Comma-separated list of attributes to return in response. 
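A creation sketch for this API (placeholder instance `account_users`; `Name` and `ComplexValue` are the existing helper types in this module):

```python
from databricks.sdk.service.iam import ComplexValue, Name

new_user = account_users.create(
    user_name="jane.doe@example.com",  # the primary identifier
    display_name="Jane Doe",
    name=Name(given_name="Jane", family_name="Doe"),
    emails=[ComplexValue(value="jane.doe@example.com", primary=True)],
)
```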
+ :param count: int (optional) + Desired number of results per page. Default is 10000. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. Multi-part paths are supported. For example, `userName`, + `name.givenName`, and `emails`. + :param sort_order: :class:`GetSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. + + :returns: :class:`AccountUser` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", query=query, headers=headers + ) + return AccountUser.from_dict(res) + + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[AccountUser]: + """Gets details for all the users associated with a Databricks account. + + :param attributes: str (optional) + Comma-separated list of attributes to return in response. + :param count: int (optional) + Desired number of results per page. Default is 10000. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. Multi-part paths are supported. For example, `userName`, + `name.givenName`, and `emails`. + :param sort_order: :class:`ListSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. 
+ + :returns: Iterator over :class:`AccountUser` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", query=query, headers=headers + ) + if "Resources" in json: + for v in json["Resources"]: + yield AccountUser.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates a user resource by applying the supplied operations on specific user attributes. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers + ) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + user_name: Optional[str] = None, + ): + """Replaces a user's information with the data supplied in request. + + :param id: str + Databricks user ID. + :param active: bool (optional) + If this user is active + :param display_name: str (optional) + String that represents a concatenation of given and family names. For example `John Smith`. + :param emails: List[:class:`ComplexValue`] (optional) + All the emails associated with the Databricks user. + :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. + :param name: :class:`Name` (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Indicates if the group has the admin role. + :param user_name: str (optional) + Email address of the Databricks user. 
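Because `update` is a full `PUT` replacement rather than a patch, resend the complete entity; a read-modify-write sketch (placeholder instance and id):

```python
u = account_users.get(id=user_id)
account_users.update(
    id=user_id,
    active=False,              # the one intended change
    user_name=u.user_name,     # restate everything else, or it is dropped
    display_name=u.display_name,
    emails=u.emails,
    name=u.name,
    roles=u.roles,
)
```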
+ + + """ + body = {} + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if external_id is not None: + body["externalId"] = external_id + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers) + + +class CurrentUserAPI: + """This API allows retrieving information about currently authenticated user or service principal.""" + + def __init__(self, api_client): + self._api = api_client + + def me(self) -> User: + """Get details about the current method caller's identity. + + + :returns: :class:`User` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) + return User.from_dict(res) + + +class GroupsV2API: + """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and + other securable objects. + + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, + instead of to users individually. All Databricks workspace identities can be assigned as members of + groups, and members inherit permissions that are assigned to their group.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ) -> Group: + """Creates a group in the Databricks workspace with a unique name, using the supplied group details. + + :param display_name: str (optional) + String that represents a human-readable group name + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param id: str (optional) + Databricks group ID + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`GroupSchema`] (optional) + The schema of the group. 
+ + :returns: :class:`Group` + """ + body = {} + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/scim/v2/Groups", body=body, headers=headers) + return Group.from_dict(res) + + def delete(self, id: str): + """Deletes a group from the Databricks workspace. + + :param id: str + Unique ID for a group in the Databricks workspace. + + + """ + + headers = {} + + self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) + + def get(self, id: str) -> Group: + """Gets the information for a specific group in the Databricks workspace. + + :param id: str + Unique ID for a group in the Databricks workspace. + + :returns: :class:`Group` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) + return Group.from_dict(res) + + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[Group]: + """Gets all details of the groups associated with the Databricks workspace. + + :param attributes: str (optional) + Comma-separated list of attributes to return in response. + :param count: int (optional) + Desired number of results per page. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. + :param sort_order: :class:`ListSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. 
+ + :returns: Iterator over :class:`Group` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + yield Group.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a group. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + + def update( + self, + id: str, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ): + """Updates the details of a group by replacing the entire group entity. + + :param id: str + Databricks group ID + :param display_name: str (optional) + String that represents a human-readable group name + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`GroupSchema`] (optional) + The schema of the group. 
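Unlike the account-level `AccountGroup` above, workspace groups carry `entitlements`; a creation sketch (placeholder instance `groups` and member id; the entitlement value is one of the documented ones):

```python
from databricks.sdk.service.iam import ComplexValue

g = groups.create(
    display_name="data-engineers",
    entitlements=[ComplexValue(value="allow-cluster-create")],
    members=[ComplexValue(value=user_id)],  # placeholder member id
)
```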
+
+
+ """
+ body = {}
+ if display_name is not None:
+ body["displayName"] = display_name
+ if entitlements is not None:
+ body["entitlements"] = [v.as_dict() for v in entitlements]
+ if external_id is not None:
+ body["externalId"] = external_id
+ if groups is not None:
+ body["groups"] = [v.as_dict() for v in groups]
+ if members is not None:
+ body["members"] = [v.as_dict() for v in members]
+ if meta is not None:
+ body["meta"] = meta.as_dict()
+ if roles is not None:
+ body["roles"] = [v.as_dict() for v in roles]
+ if schemas is not None:
+ body["schemas"] = [v.value for v in schemas]
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ }
+
+ self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers)
+
+
+class PermissionMigrationAPI:
+ """APIs for migrating ACL permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx"""
+
+ def __init__(self, api_client):
+ self._api = api_client
+
+ def migrate_permissions(
+ self,
+ workspace_id: int,
+ from_workspace_group_name: str,
+ to_account_group_name: str,
+ *,
+ size: Optional[int] = None,
+ ) -> MigratePermissionsResponse:
+ """Migrates permissions from a workspace group to an account group.
+
+ :param workspace_id: int
+ WorkspaceId of the associated workspace where the permission migration will occur.
+ :param from_workspace_group_name: str
+ The name of the workspace group that permissions will be migrated from.
+ :param to_account_group_name: str
+ The name of the account group that permissions will be migrated to.
+ :param size: int (optional)
+ The maximum number of permissions that will be migrated.
+
+ :returns: :class:`MigratePermissionsResponse`
+ """
+ body = {}
+ if from_workspace_group_name is not None:
+ body["from_workspace_group_name"] = from_workspace_group_name
+ if size is not None:
+ body["size"] = size
+ if to_account_group_name is not None:
+ body["to_account_group_name"] = to_account_group_name
+ if workspace_id is not None:
+ body["workspace_id"] = workspace_id
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ }
+
+ res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers)
+ return MigratePermissionsResponse.from_dict(res)
+
+
+class PermissionsAPI:
+ """The Permissions API is used to create, read, write, edit, update, and manage access for various users on
+ different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage
+ or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or
+ attach to clusters. * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users
+ can use cluster policies. * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage
+ which users can view, manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job
+ permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a job. *
+ **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or manage
+ MLflow experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which
+ users can read, edit, or manage MLflow registered models. * **[Instance Pool
+ permissions](:service:instancepools)** — Manage which users can manage or attach to pools. * **[Repo
+ permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
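A sketch of the migration call above, assuming the client exposes it as `w.permission_migration`; the workspace ID and group names are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Migrate at most 1000 ACLs from a workspace-local group to the account
    # group of the same name; the response reports how many were moved.
    resp = w.permission_migration.migrate_permissions(
        workspace_id=1234567890,
        from_workspace_group_name="data-engineers",
        to_account_group_name="data-engineers",
        size=1000,
    )
    print(resp.permissions_migrated)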
* **[Serving endpoint + permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a serving + endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage + SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or + use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, + edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of + the required permissions for specific actions or abilities and other important information, see [Access + Control]. Note that to manage access control on service principals, use **[Account Access Control + Proxy](:service:accountaccesscontrolproxy)**. + + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" + + def __init__(self, api_client): + self._api = api_client + + def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: + """Gets the permissions of an object. Objects can inherit permissions from their parent objects or root + object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. + + :returns: :class:`ObjectPermissions` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) + return ObjectPermissions.from_dict(res) + + def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: + """Gets the permission levels that a user can have on an object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + + :returns: :class:`GetPermissionLevelsResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers + ) + return GetPermissionLevelsResponse.from_dict(res) + + def set( + self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their parent objects or root + object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. 
+ :param access_control_list: List[:class:`AccessControlRequest`] (optional) + + :returns: :class:`ObjectPermissions` + """ + body = {} + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) + return ObjectPermissions.from_dict(res) + + def update( + self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: + """Updates the permissions on an object. Objects can inherit permissions from their parent objects or + root object. + + :param request_object_type: str + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. + :param request_object_id: str + The id of the request object. + :param access_control_list: List[:class:`AccessControlRequest`] (optional) + + :returns: :class:`ObjectPermissions` + """ + body = {} + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) + return ObjectPermissions.from_dict(res) + + +class ServicePrincipalsV2API: + """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. + Databricks recommends creating service principals to run production jobs or modify production data. If all + processes that act on production data run with service principals, interactive users do not need any + write, delete, or modify privileges in production. This eliminates the risk of a user overwriting + production data by accident.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ) -> ServicePrincipal: + """Creates a new service principal in the Databricks workspace. + + :param active: bool (optional) + If this user is active + :param application_id: str (optional) + UUID relating to the service principal + :param display_name: str (optional) + String that represents a concatenation of given and family names. + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param id: str (optional) + Databricks service principal ID. 
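The PUT-versus-PATCH distinction between `set` and `update` above, sketched against a cluster; the cluster ID, group, and user are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()
    cluster_id = "0123-456789-abcdef00"  # placeholder

    # set (PUT): replaces all direct permissions with exactly this list.
    w.permissions.set(
        request_object_type="clusters",
        request_object_id=cluster_id,
        access_control_list=[
            iam.AccessControlRequest(
                group_name="data-engineers",
                permission_level=iam.PermissionLevel.CAN_RESTART,
            )
        ],
    )

    # update (PATCH): merges this grant into the existing direct permissions.
    w.permissions.update(
        request_object_type="clusters",
        request_object_id=cluster_id,
        access_control_list=[
            iam.AccessControlRequest(
                user_name="jane@example.com",
                permission_level=iam.PermissionLevel.CAN_MANAGE,
            )
        ],
    )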
+ :param roles: List[:class:`ComplexValue`] (optional)
+ Corresponds to AWS instance profile/arn role.
+ :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+ The schema of the List response.
+
+ :returns: :class:`ServicePrincipal`
+ """
+ body = {}
+ if active is not None:
+ body["active"] = active
+ if application_id is not None:
+ body["applicationId"] = application_id
+ if display_name is not None:
+ body["displayName"] = display_name
+ if entitlements is not None:
+ body["entitlements"] = [v.as_dict() for v in entitlements]
+ if external_id is not None:
+ body["externalId"] = external_id
+ if groups is not None:
+ body["groups"] = [v.as_dict() for v in groups]
+ if id is not None:
+ body["id"] = id
+ if roles is not None:
+ body["roles"] = [v.as_dict() for v in roles]
+ if schemas is not None:
+ body["schemas"] = [v.value for v in schemas]
+ headers = {
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ }
+
+ res = self._api.do("POST", "/api/2.0/preview/scim/v2/ServicePrincipals", body=body, headers=headers)
+ return ServicePrincipal.from_dict(res)
+
+ def delete(self, id: str):
+ """Deletes a single service principal in the Databricks workspace.
+
+ :param id: str
+ Unique ID for a service principal in the Databricks workspace.
+
+
+ """
+
+ headers = {}
+
+ self._api.do("DELETE", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers)
+
+ def get(self, id: str) -> ServicePrincipal:
+ """Gets the details for a single service principal defined in the Databricks workspace.
+
+ :param id: str
+ Unique ID for a service principal in the Databricks workspace.
+
+ :returns: :class:`ServicePrincipal`
+ """
+
+ headers = {
+ "Accept": "application/json",
+ }
+
+ res = self._api.do("GET", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers)
+ return ServicePrincipal.from_dict(res)
+
+ def list(
+ self,
+ *,
+ attributes: Optional[str] = None,
+ count: Optional[int] = None,
+ excluded_attributes: Optional[str] = None,
+ filter: Optional[str] = None,
+ sort_by: Optional[str] = None,
+ sort_order: Optional[ListSortOrder] = None,
+ start_index: Optional[int] = None,
+ ) -> Iterator[ServicePrincipal]:
+ """Gets the set of service principals associated with a Databricks workspace.
+
+ :param attributes: str (optional)
+ Comma-separated list of attributes to return in response.
+ :param count: int (optional)
+ Desired number of results per page.
+ :param excluded_attributes: str (optional)
+ Comma-separated list of attributes to exclude in response.
+ :param filter: str (optional)
+ Query by which the results have to be filtered. Supported operators are equals(`eq`),
+ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+ formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+ only support simple expressions.
+
+ [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+ :param sort_by: str (optional)
+ Attribute to sort the results.
+ :param sort_order: :class:`ListSortOrder` (optional)
+ The order to sort the results.
+ :param start_index: int (optional)
+ Specifies the index of the first result. First item is number 1.
+ + :returns: Iterator over :class:`ServicePrincipal` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do("GET", "/api/2.0/preview/scim/v2/ServicePrincipals", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + yield ServicePrincipal.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a single service principal in the Databricks workspace. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ): + """Updates the details of a single service principal. + + This action replaces the existing service principal with the same name. + + :param id: str + Databricks service principal ID. + :param active: bool (optional) + If this user is active + :param application_id: str (optional) + UUID relating to the service principal + :param display_name: str (optional) + String that represents a concatenation of given and family names. + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`ServicePrincipalSchema`] (optional) + The schema of the List response. 
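A sketch of the SCIM PATCH contract documented above; the service-principal ID is a placeholder, the entitlement payload shape is illustrative, and `w.service_principals` is the assumed client attribute:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Add one entitlement without replacing the rest of the service principal.
    w.service_principals.patch(
        id="123456",
        operations=[
            iam.Patch(
                op=iam.PatchOp.ADD,
                path="entitlements",
                value=[{"value": "allow-cluster-create"}],
            )
        ],
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
    )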
+ + + """ + body = {} + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) + + +class UsersV2API: + """User identities recognized by Databricks and represented by email addresses. + + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity + provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your + identity provider to create users and groups in Databricks workspace and give them the proper level of + access. When a user leaves your organization or no longer needs access to Databricks workspace, admins can + terminate the user in your identity provider and that user’s account will also be removed from + Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from + accessing sensitive data.""" + + def __init__(self, api_client): + self._api = api_client + + def create( + self, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ) -> User: + """Creates a new user in the Databricks workspace. This new user will also be added to the Databricks + account. + + :param active: bool (optional) + If this user is active + :param display_name: str (optional) + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. + + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation + :param emails: List[:class:`ComplexValue`] (optional) + All the emails associated with the Databricks user. + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. + :param groups: List[:class:`ComplexValue`] (optional) + :param id: str (optional) + Databricks user ID. + :param name: :class:`Name` (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`UserSchema`] (optional) + The schema of the user. 
+ :param user_name: str (optional) + Email address of the Databricks user. + + :returns: :class:`User` + """ + body = {} + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/scim/v2/Users", body=body, headers=headers) + return User.from_dict(res) + + def delete(self, id: str): + """Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the + user. + + :param id: str + Unique ID for a user in the Databricks workspace. + + + """ + + headers = {} + + self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Users/{id}", headers=headers) + + def get( + self, + id: str, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[GetSortOrder] = None, + start_index: Optional[int] = None, + ) -> User: + """Gets information for a specific user in Databricks workspace. + + :param id: str + Unique ID for a user in the Databricks workspace. + :param attributes: str (optional) + Comma-separated list of attributes to return in response. + :param count: int (optional) + Desired number of results per page. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. Multi-part paths are supported. For example, `userName`, + `name.givenName`, and `emails`. + :param sort_order: :class:`GetSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. 
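A minimal sketch of the create call above, assuming the client exposes this service as `w.users`; the email and display name are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # userName doubles as the user's login email address.
    user = w.users.create(
        user_name="jane@example.com",
        display_name="Jane Smith",
    )
    print(user.id)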
+ + :returns: :class:`User` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Users/{id}", query=query, headers=headers) + return User.from_dict(res) + + def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: + """Gets the permission levels that a user can have on an object. + + + :returns: :class:`GetPasswordPermissionLevelsResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords/permissionLevels", headers=headers) + return GetPasswordPermissionLevelsResponse.from_dict(res) + + def get_permissions(self) -> PasswordPermissions: + """Gets the permissions of all passwords. Passwords can inherit permissions from their root object. + + + :returns: :class:`PasswordPermissions` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords", headers=headers) + return PasswordPermissions.from_dict(res) + + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[User]: + """Gets details for all the users associated with a Databricks workspace. + + :param attributes: str (optional) + Comma-separated list of attributes to return in response. + :param count: int (optional) + Desired number of results per page. + :param excluded_attributes: str (optional) + Comma-separated list of attributes to exclude in response. + :param filter: str (optional) + Query by which the results have to be filtered. Supported operators are equals(`eq`), + contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be + formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently + only support simple expressions. + + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 + :param sort_by: str (optional) + Attribute to sort the results. Multi-part paths are supported. For example, `userName`, + `name.givenName`, and `emails`. + :param sort_order: :class:`ListSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. 
+ + :returns: Iterator over :class:`User` + """ + + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Users", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + yield User.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates a user resource by applying the supplied operations on specific user attributes. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. + + + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) + + def set_permissions( + self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None + ) -> PasswordPermissions: + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + permissions if none are specified. Objects can inherit permissions from their root object. + + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) + + :returns: :class:`PasswordPermissions` + """ + body = {} + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) + return PasswordPermissions.from_dict(res) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ): + """Replaces a user's information with the data supplied in request. + + :param id: str + Databricks user ID. + :param active: bool (optional) + If this user is active + :param display_name: str (optional) + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. 
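Because the loop above requests pages of 10000 items by default, trimming the response with `attributes` is the usual pattern when only a couple of fields are needed; a sketch:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # The iterator bumps startIndex by each page's size until Resources is empty.
    ids = [
        u.id
        for u in w.users.list(
            attributes="id,userName",
            sort_by="userName",
            sort_order=iam.ListSortOrder.DESCENDING,
        )
    ]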
+ + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation + :param emails: List[:class:`ComplexValue`] (optional) + All the emails associated with the Databricks user. + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. + :param groups: List[:class:`ComplexValue`] (optional) + :param name: :class:`Name` (optional) + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`UserSchema`] (optional) + The schema of the user. + :param user_name: str (optional) + Email address of the Databricks user. + + + """ + body = {} + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) + + def update_permissions( + self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None + ) -> PasswordPermissions: + """Updates the permissions on all passwords. Passwords can inherit permissions from their root object. + + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) + + :returns: :class:`PasswordPermissions` + """ + body = {} + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) + return PasswordPermissions.from_dict(res) + + +class WorkspaceAssignmentAPI: + """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your + account.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, workspace_id: int, principal_id: int): + """Deletes the workspace permissions assignment in a given account and workspace for the specified + principal. + + :param workspace_id: int + The workspace ID for the account. + :param principal_id: int + The ID of the user, service principal, or group. 
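A sketch of the password-permission calls above; the group name is hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # PATCH semantics: merge this grant into the existing password ACL rather
    # than replacing it (set_permissions would replace it wholesale).
    perms = w.users.update_permissions(
        access_control_list=[
            iam.PasswordAccessControlRequest(
                group_name="admins",
                permission_level=iam.PasswordPermissionLevel.CAN_USE,
            )
        ]
    )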
+ + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + headers=headers, + ) + + def get(self, workspace_id: int) -> WorkspacePermissions: + """Get an array of workspace permissions for the specified account and workspace. + + :param workspace_id: int + The workspace ID. + + :returns: :class:`WorkspacePermissions` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions", + headers=headers, + ) + return WorkspacePermissions.from_dict(res) + + def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: + """Get the permission assignments for the specified Databricks account and Databricks workspace. + + :param workspace_id: int + The workspace ID for the account. + + :returns: Iterator over :class:`PermissionAssignment` + """ + + headers = { + "Accept": "application/json", + } + + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", + headers=headers, + ) + parsed = PermissionAssignments.from_dict(json).permission_assignments + return parsed if parsed is not None else [] + + def update( + self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None + ) -> PermissionAssignment: + """Creates or updates the workspace permissions assignment in a given account and workspace for the + specified principal. + + :param workspace_id: int + The workspace ID. + :param principal_id: int + The ID of the user, service principal, or group. + :param permissions: List[:class:`WorkspacePermission`] (optional) + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. + + :returns: :class:`PermissionAssignment` + """ + body = {} + if permissions is not None: + body["permissions"] = [v.value for v in permissions] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + body=body, + headers=headers, + ) + return PermissionAssignment.from_dict(res) + + class AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. @@ -2969,27 +5349,6 @@ def update( self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers) -class CurrentUserAPI: - """This API allows retrieving information about currently authenticated user or service principal.""" - - def __init__(self, api_client): - self._api = api_client - - def me(self) -> User: - """Get details about the current method caller's identity. 
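An account-level sketch of the assignment update above; the workspace and principal IDs are placeholders, and note the docstring's warning that omitting `permissions` deletes all of the principal's assignments:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    # PUT: grants (or overwrites) USER-level access for one principal.
    assignment = a.workspace_assignment.update(
        workspace_id=1234567890,
        principal_id=987654321,
        permissions=[iam.WorkspacePermission.USER],
    )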
- - - :returns: :class:`User` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) - return User.from_dict(res) - - class GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. @@ -3119,319 +5478,133 @@ def list( only support simple expressions. [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 - :param sort_by: str (optional) - Attribute to sort the results. - :param sort_order: :class:`ListSortOrder` (optional) - The order to sort the results. - :param start_index: int (optional) - Specifies the index of the first result. First item is number 1. - - :returns: Iterator over :class:`Group` - """ - - query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - # deduplicate items that may have been added during iteration - seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 10000 - while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Partially updates the details of a group. - - :param id: str - Unique ID in the Databricks workspace. - :param operations: List[:class:`Patch`] (optional) - :param schemas: List[:class:`PatchSchema`] (optional) - The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - - """ - body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - - def update( - self, - id: str, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ): - """Updates the details of a group by replacing the entire group entity. - - :param id: str - Databricks group ID - :param display_name: str (optional) - String that represents a human-readable group name - :param entitlements: List[:class:`ComplexValue`] (optional) - Entitlements assigned to the group. See [assigning entitlements] for a full list of supported - values. 
- - [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements - :param external_id: str (optional) - :param groups: List[:class:`ComplexValue`] (optional) - :param members: List[:class:`ComplexValue`] (optional) - :param meta: :class:`ResourceMeta` (optional) - Container for the group identifier. Workspace local versus account. - :param roles: List[:class:`ComplexValue`] (optional) - Corresponds to AWS instance profile/arn role. - :param schemas: List[:class:`GroupSchema`] (optional) - The schema of the group. - - - """ - body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - - -class PermissionMigrationAPI: - """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" - - def __init__(self, api_client): - self._api = api_client - - def migrate_permissions( - self, - workspace_id: int, - from_workspace_group_name: str, - to_account_group_name: str, - *, - size: Optional[int] = None, - ) -> MigratePermissionsResponse: - """Migrate Permissions. - - :param workspace_id: int - WorkspaceId of the associated workspace where the permission migration will occur. - :param from_workspace_group_name: str - The name of the workspace group that permissions will be migrated from. - :param to_account_group_name: str - The name of the account group that permissions will be migrated to. - :param size: int (optional) - The maximum number of permissions that will be migrated. - - :returns: :class:`MigratePermissionsResponse` - """ - body = {} - if from_workspace_group_name is not None: - body["from_workspace_group_name"] = from_workspace_group_name - if size is not None: - body["size"] = size - if to_account_group_name is not None: - body["to_account_group_name"] = to_account_group_name - if workspace_id is not None: - body["workspace_id"] = workspace_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers) - return MigratePermissionsResponse.from_dict(res) - - -class PermissionsAPI: - """Permissions API are used to create read, write, edit, update and manage access for various users on - different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage - or use apps. * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or - attach to clusters. * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users - can use cluster policies. * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage - which users can view, manage, run, cancel, or own a Delta Live Tables pipeline. * **[Job - permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a job. 
* - **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or manage - MLflow experiments. * **[MLflow registered model permissions](:service:modelregistry)** — Manage which - users can read, edit, or manage MLflow registered models. * **[Instance Pool - permissions](:service:instancepools)** — Manage which users can manage or attach to pools. * **[Repo - permissions](repos)** — Manage which users can read, run, edit, or manage a repo. * **[Serving endpoint - permissions](:service:servingendpoints)** — Manage which users can view, query, or manage a serving - endpoint. * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage - SQL warehouses. * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or - use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, - edit, or manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of - the required permissions for specific actions or abilities and other important information, see [Access - Control]. Note that to manage access control on service principals, use **[Account Access Control - Proxy](:service:accountaccesscontrolproxy)**. - - [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" - - def __init__(self, api_client): - self._api = api_client - - def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: - """Gets the permissions of an object. Objects can inherit permissions from their parent objects or root - object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. - - :returns: :class:`ObjectPermissions` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) - return ObjectPermissions.from_dict(res) - - def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: - """Gets the permission levels that a user can have on an object. - - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str + :param sort_by: str (optional) + Attribute to sort the results. + :param sort_order: :class:`ListSortOrder` (optional) + The order to sort the results. + :param start_index: int (optional) + Specifies the index of the first result. First item is number 1. 
- :returns: :class:`GetPermissionLevelsResponse` + :returns: Iterator over :class:`Group` """ + query = {} + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index headers = { "Accept": "application/json", } - res = self._api.do( - "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers - ) - return GetPermissionLevelsResponse.from_dict(res) + # deduplicate items that may have been added during iteration + seen = set() + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 + while True: + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) - def set( - self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct - permissions if none are specified. Objects can inherit permissions from their parent objects or root - object. + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Partially updates the details of a group. + + :param id: str + Unique ID in the Databricks workspace. + :param operations: List[:class:`Patch`] (optional) + :param schemas: List[:class:`PatchSchema`] (optional) + The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) - :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] headers = { - "Accept": "application/json", "Content-Type": "application/json", } - res = self._api.do( - "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) - return ObjectPermissions.from_dict(res) + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) def update( self, - request_object_type: str, - request_object_id: str, + id: str, *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Updates the permissions on an object. 
Objects can inherit permissions from their parent objects or - root object. + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ): + """Updates the details of a group by replacing the entire group entity. + + :param id: str + Databricks group ID + :param display_name: str (optional) + String that represents a human-readable group name + :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements + :param external_id: str (optional) + :param groups: List[:class:`ComplexValue`] (optional) + :param members: List[:class:`ComplexValue`] (optional) + :param meta: :class:`ResourceMeta` (optional) + Container for the group identifier. Workspace local versus account. + :param roles: List[:class:`ComplexValue`] (optional) + Corresponds to AWS instance profile/arn role. + :param schemas: List[:class:`GroupSchema`] (optional) + The schema of the group. - :param request_object_type: str - The type of the request object. Can be one of the following: alerts, alertsv2, authorization, - clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, - instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or - warehouses. - :param request_object_id: str - The id of the request object. 
- :param access_control_list: List[:class:`AccessControlRequest`] (optional) - :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] headers = { - "Accept": "application/json", "Content-Type": "application/json", } - res = self._api.do( - "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) - return ObjectPermissions.from_dict(res) + self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) class ServicePrincipalsAPI: @@ -4100,109 +6273,3 @@ def update_permissions( res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) return PasswordPermissions.from_dict(res) - - -class WorkspaceAssignmentAPI: - """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your - account.""" - - def __init__(self, api_client): - self._api = api_client - - def delete(self, workspace_id: int, principal_id: int): - """Deletes the workspace permissions assignment in a given account and workspace for the specified - principal. - - :param workspace_id: int - The workspace ID for the account. - :param principal_id: int - The ID of the user, service principal, or group. - - - """ - - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", - headers=headers, - ) - - def get(self, workspace_id: int) -> WorkspacePermissions: - """Get an array of workspace permissions for the specified account and workspace. - - :param workspace_id: int - The workspace ID. - - :returns: :class:`WorkspacePermissions` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions", - headers=headers, - ) - return WorkspacePermissions.from_dict(res) - - def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: - """Get the permission assignments for the specified Databricks account and Databricks workspace. - - :param workspace_id: int - The workspace ID for the account. 
- - :returns: Iterator over :class:`PermissionAssignment` - """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", - headers=headers, - ) - parsed = PermissionAssignments.from_dict(json).permission_assignments - return parsed if parsed is not None else [] - - def update( - self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None - ) -> PermissionAssignment: - """Creates or updates the workspace permissions assignment in a given account and workspace for the - specified principal. - - :param workspace_id: int - The workspace ID. - :param principal_id: int - The ID of the user, service principal, or group. - :param permissions: List[:class:`WorkspacePermission`] (optional) - Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" - (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values - will be ignored. Note that excluding this field, or providing unsupported values, will have the same - effect as providing an empty list, which will result in the deletion of all permissions for the - principal. - - :returns: :class:`PermissionAssignment` - """ - body = {} - if permissions is not None: - body["permissions"] = [v.value for v in permissions] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", - body=body, - headers=headers, - ) - return PermissionAssignment.from_dict(res) diff --git a/databricks/sdk/service/iamv2.py b/databricks/sdk/service/iamv2.py index 243e2fe67..d7ca728fc 100755 --- a/databricks/sdk/service/iamv2.py +++ b/databricks/sdk/service/iamv2.py @@ -218,6 +218,81 @@ class PrincipalType(Enum): USER = "USER" +@dataclass +class ResolveGroupResponse: + group: Optional[Group] = None + """The group that was resolved.""" + + def as_dict(self) -> dict: + """Serializes the ResolveGroupResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.group: + body["group"] = self.group.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ResolveGroupResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.group: + body["group"] = self.group + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ResolveGroupResponse: + """Deserializes the ResolveGroupResponse from a dictionary.""" + return cls(group=_from_dict(d, "group", Group)) + + +@dataclass +class ResolveServicePrincipalResponse: + service_principal: Optional[ServicePrincipal] = None + """The service principal that was resolved.""" + + def as_dict(self) -> dict: + """Serializes the ResolveServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.service_principal: + body["service_principal"] = self.service_principal.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ResolveServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.service_principal: + body["service_principal"] = self.service_principal + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ResolveServicePrincipalResponse: + """Deserializes the ResolveServicePrincipalResponse from 
a dictionary.""" + return cls(service_principal=_from_dict(d, "service_principal", ServicePrincipal)) + + +@dataclass +class ResolveUserResponse: + user: Optional[User] = None + """The user that was resolved.""" + + def as_dict(self) -> dict: + """Serializes the ResolveUserResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.user: + body["user"] = self.user.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ResolveUserResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.user: + body["user"] = self.user + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ResolveUserResponse: + """Deserializes the ResolveUserResponse from a dictionary.""" + return cls(user=_from_dict(d, "user", User)) + + @dataclass class ServicePrincipal: """The details of a ServicePrincipal resource.""" @@ -226,7 +301,7 @@ class ServicePrincipal: """The parent account ID for the service principal in Databricks.""" account_sp_status: Optional[State] = None - """The activity status of a sp in a Databricks account.""" + """The activity status of a service principal in a Databricks account.""" application_id: Optional[str] = None """Application ID of the service principal.""" @@ -294,81 +369,6 @@ class State(Enum): INACTIVE = "INACTIVE" -@dataclass -class SyncGroupResponse: - group: Optional[Group] = None - """The group that was synced.""" - - def as_dict(self) -> dict: - """Serializes the SyncGroupResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.group: - body["group"] = self.group.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SyncGroupResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.group: - body["group"] = self.group - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SyncGroupResponse: - """Deserializes the SyncGroupResponse from a dictionary.""" - return cls(group=_from_dict(d, "group", Group)) - - -@dataclass -class SyncServicePrincipalResponse: - service_principal: Optional[ServicePrincipal] = None - """The service principal that was synced.""" - - def as_dict(self) -> dict: - """Serializes the SyncServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.service_principal: - body["service_principal"] = self.service_principal.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SyncServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.service_principal: - body["service_principal"] = self.service_principal - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SyncServicePrincipalResponse: - """Deserializes the SyncServicePrincipalResponse from a dictionary.""" - return cls(service_principal=_from_dict(d, "service_principal", ServicePrincipal)) - - -@dataclass -class SyncUserResponse: - user: Optional[User] = None - """The user that was synced.""" - - def as_dict(self) -> dict: - """Serializes the SyncUserResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.user: - body["user"] = self.user.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SyncUserResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.user: - body["user"] = self.user - return body - - @classmethod - def from_dict(cls, d: Dict[str, 
Any]) -> SyncUserResponse: - """Deserializes the SyncUserResponse from a dictionary.""" - return cls(user=_from_dict(d, "user", User)) - - @dataclass class User: """The details of a User resource.""" @@ -928,17 +928,15 @@ def list_workspace_access_details( ) return ListWorkspaceAccessDetailsResponse.from_dict(res) - def sync_group(self, external_id: str) -> SyncGroupResponse: - """Syncs a group with the given external ID from the customer's IdP. If the group does not exist, it will - be created in the account. If the customer is not onboarded onto Automatic Identity Management (AIM), - this will return an error. Synced information is cached for 30 minutes, so subsequent calls to this - method will not result in a full sync unless the cache is stale. If this is triggered while the cache - is still valid, it will return the cached group information. + def resolve_group(self, external_id: str) -> ResolveGroupResponse: + """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it + will be created in the account. If the customer is not onboarded onto Automatic Identity Management + (AIM), this will return an error. :param external_id: str Required. The external ID of the group in the customer's IdP. - :returns: :class:`SyncGroupResponse` + :returns: :class:`ResolveGroupResponse` """ body = {} if external_id is not None: @@ -950,23 +948,21 @@ def sync_group(self, external_id: str) -> SyncGroupResponse: res = self._api.do( "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/groups/syncByExternalId", + f"/api/2.0/identity/accounts/{self._api.account_id}/groups/resolveByExternalId", body=body, headers=headers, ) - return SyncGroupResponse.from_dict(res) + return ResolveGroupResponse.from_dict(res) - def sync_service_principal(self, external_id: str) -> SyncServicePrincipalResponse: - """Syncs a sp with the given external ID from the customer's IdP. If the sp does not exist, it will be - created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will return - an error. Synced information is cached for 30 minutes, so subsequent calls to this method will not - result in a full sync unless the cache is stale. If this is triggered while the cache is still valid, - it will return the cached SP information. + def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipalResponse: + """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will + be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. :param external_id: str Required. The external ID of the service principal in the customer's IdP. - :returns: :class:`SyncServicePrincipalResponse` + :returns: :class:`ResolveServicePrincipalResponse` """ body = {} if external_id is not None: @@ -978,23 +974,21 @@ def sync_service_principal(self, external_id: str) -> SyncServicePrincipalRespon res = self._api.do( "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/syncByExternalId", + f"/api/2.0/identity/accounts/{self._api.account_id}/servicePrincipals/resolveByExternalId", body=body, headers=headers, ) - return SyncServicePrincipalResponse.from_dict(res) + return ResolveServicePrincipalResponse.from_dict(res) - def sync_user(self, external_id: str) -> SyncUserResponse: - """Syncs a user with the given external ID from the customer's IdP. If the user does not exist, it will - be created. 
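The rename from `sync_*` to `resolve_*` above also moves the endpoints from `syncByExternalId` to `resolveByExternalId`. A minimal sketch of the account-level call, assuming the `AccountIamV2API` service is constructed directly against the account API client (this patch does not show an `AccountClient` accessor for it):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.iamv2 import AccountIamV2API

a = AccountClient()  # reads host/account_id/credentials from the environment
iam = AccountIamV2API(a.api_client)  # direct construction; accessor name is an assumption

# Resolves the group by its IdP external ID, creating it if it does not exist.
# Requires the account to be onboarded onto Automatic Identity Management (AIM).
resp = iam.resolve_group(external_id="idp-group-123")  # placeholder external ID
if resp.group is not None:
    print(resp.group)
```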
If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. Synced information is cached for 30 minutes, so subsequent calls to this method will - not result in a full sync unless the cache is stale. If this is triggered while the cache is still - valid, it will return the cached user information. + def resolve_user(self, external_id: str) -> ResolveUserResponse: + """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it + will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. :param external_id: str Required. The external ID of the user in the customer's IdP. - :returns: :class:`SyncUserResponse` + :returns: :class:`ResolveUserResponse` """ body = {} if external_id is not None: @@ -1006,11 +1000,11 @@ def sync_user(self, external_id: str) -> SyncUserResponse: res = self._api.do( "POST", - f"/api/2.0/identity/accounts/{self._api.account_id}/users/syncByExternalId", + f"/api/2.0/identity/accounts/{self._api.account_id}/users/resolveByExternalId", body=body, headers=headers, ) - return SyncUserResponse.from_dict(res) + return ResolveUserResponse.from_dict(res) def update_group(self, internal_id: int, group: Group, update_mask: str) -> Group: """TODO: Write description later when this method is implemented @@ -1385,7 +1379,7 @@ def list_service_principals_proxy( """TODO: Write description later when this method is implemented :param page_size: int (optional) - The maximum number of sps to return. The service may return fewer than this value. + The maximum number of SPs to return. The service may return fewer than this value. :param page_token: str (optional) A page token, received from a previous ListServicePrincipals call. Provide this to retrieve the subsequent page. @@ -1457,17 +1451,15 @@ def list_workspace_access_details_local( res = self._api.do("GET", "/api/2.0/identity/workspaceAccessDetails", query=query, headers=headers) return ListWorkspaceAccessDetailsResponse.from_dict(res) - def sync_group_proxy(self, external_id: str) -> SyncGroupResponse: - """Syncs a group with the given external ID from the customer's IdP. If the group does not exist, it will - be created in the account. If the customer is not onboarded onto Automatic Identity Management (AIM), - this will return an error. Synced information is cached for 30 minutes, so subsequent calls to this - method will not result in a full sync unless the cache is stale. If this is triggered while the cache - is still valid, it will return the cached group information. + def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: + """Resolves a group with the given external ID from the customer's IdP. If the group does not exist, it + will be created in the account. If the customer is not onboarded onto Automatic Identity Management + (AIM), this will return an error. :param external_id: str Required. The external ID of the group in the customer's IdP. 
- :returns: :class:`SyncGroupResponse` + :returns: :class:`ResolveGroupResponse` """ body = {} if external_id is not None: @@ -1477,20 +1469,18 @@ def sync_group_proxy(self, external_id: str) -> SyncGroupResponse: "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/identity/groups/syncByExternalId", body=body, headers=headers) - return SyncGroupResponse.from_dict(res) + res = self._api.do("POST", "/api/2.0/identity/groups/resolveByExternalId", body=body, headers=headers) + return ResolveGroupResponse.from_dict(res) - def sync_service_principal_proxy(self, external_id: str) -> SyncServicePrincipalResponse: - """Syncs a sp with the given external ID from the customer's IdP. If the sp does not exist, it will be - created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will return - an error. Synced information is cached for 30 minutes, so subsequent calls to this method will not - result in a full sync unless the cache is stale. If this is triggered while the cache is still valid, - it will return the cached SP information. + def resolve_service_principal_proxy(self, external_id: str) -> ResolveServicePrincipalResponse: + """Resolves an SP with the given external ID from the customer's IdP. If the SP does not exist, it will + be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. :param external_id: str Required. The external ID of the service principal in the customer's IdP. - :returns: :class:`SyncServicePrincipalResponse` + :returns: :class:`ResolveServicePrincipalResponse` """ body = {} if external_id is not None: @@ -1500,20 +1490,20 @@ def sync_service_principal_proxy(self, external_id: str) -> SyncServicePrincipal "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/identity/servicePrincipals/syncByExternalId", body=body, headers=headers) - return SyncServicePrincipalResponse.from_dict(res) + res = self._api.do( + "POST", "/api/2.0/identity/servicePrincipals/resolveByExternalId", body=body, headers=headers + ) + return ResolveServicePrincipalResponse.from_dict(res) - def sync_user_proxy(self, external_id: str) -> SyncUserResponse: - """Syncs a user with the given external ID from the customer's IdP. If the user does not exist, it will - be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will - return an error. Synced information is cached for 30 minutes, so subsequent calls to this method will - not result in a full sync unless the cache is stale. If this is triggered while the cache is still - valid, it will return the cached user information. + def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: + """Resolves a user with the given external ID from the customer's IdP. If the user does not exist, it + will be created. If the customer is not onboarded onto Automatic Identity Management (AIM), this will + return an error. :param external_id: str Required. The external ID of the user in the customer's IdP. 
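The workspace-scoped proxy methods follow the same pattern against `/api/2.0/identity/...`, without an account ID in the path. A sketch under the same assumption of direct construction (the `WorkspaceClient` accessor name is not part of this hunk):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.iamv2 import WorkspaceIamV2API

w = WorkspaceClient()
iam = WorkspaceIamV2API(w.api_client)  # direct construction; accessor name is an assumption

# POST /api/2.0/identity/servicePrincipals/resolveByExternalId
resp = iam.resolve_service_principal_proxy(external_id="idp-sp-42")  # placeholder external ID
if resp.service_principal is not None:
    print(resp.service_principal.application_id)
```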
- :returns: :class:`SyncUserResponse` + :returns: :class:`ResolveUserResponse` """ body = {} if external_id is not None: @@ -1523,8 +1513,8 @@ def sync_user_proxy(self, external_id: str) -> SyncUserResponse: "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/identity/users/syncByExternalId", body=body, headers=headers) - return SyncUserResponse.from_dict(res) + res = self._api.do("POST", "/api/2.0/identity/users/resolveByExternalId", body=body, headers=headers) + return ResolveUserResponse.from_dict(res) def update_group_proxy(self, internal_id: int, group: Group, update_mask: str) -> Group: """TODO: Write description later when this method is implemented diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 729942d5c..6c35188bc 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -42,6 +42,9 @@ class BaseJob: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" + effective_usage_policy_id: Optional[str] = None + """The id of the usage policy used by this job for cost attribution purposes.""" + has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 @@ -50,6 +53,10 @@ class BaseJob: job_id: Optional[int] = None """The canonical identifier for this job.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. 
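The new `effective_usage_policy_id` and `path` fields above surface on every job returned by list calls. A hedged sketch of reading them (either value may be `None` when the backend does not populate it):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for base_job in w.jobs.list(limit=25):
    # `path` is absent when the job has no workspace object;
    # `effective_usage_policy_id` identifies the policy used for cost attribution.
    print(base_job.job_id, base_job.effective_usage_policy_id, base_job.path)
```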
These settings can be updated using the `resetJob` method.""" @@ -66,10 +73,14 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings.as_dict() if self.trigger_state: @@ -85,10 +96,14 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id + if self.path is not None: + body["path"] = self.path if self.settings: body["settings"] = self.settings if self.trigger_state: @@ -102,8 +117,10 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseJob: created_time=d.get("created_time", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), + path=d.get("path", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), ) @@ -147,6 +164,9 @@ class BaseRun: `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" + effective_usage_policy_id: Optional[str] = None + """The id of the usage policy used by this run for cost attribution purposes.""" + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). 
This field is set to 0 if the job is still running.""" @@ -267,6 +287,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -338,6 +360,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -403,6 +427,7 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: creator_user_name=d.get("creator_user_name", None), description=d.get("description", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), end_time=d.get("end_time", None), execution_duration=d.get("execution_duration", None), git_source=_from_dict(d, "git_source", GitSource), @@ -1635,9 +1660,7 @@ class ExportRunOutput: views: Optional[List[ViewItem]] = None """The exported content in HTML format (one for every view item). To extract the HTML notebook from - the JSON response, download and run this [Python script]. - - [Python script]: https://docs.databricks.com/en/_static/examples/extract.py""" + the JSON response, download and run this [Python script](/_static/examples/extract.py).""" def as_dict(self) -> dict: """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body.""" @@ -2222,6 +2245,9 @@ class Job: Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" + effective_usage_policy_id: Optional[str] = None + """The id of the usage policy used by this job for cost attribution purposes.""" + has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 @@ -2233,6 +2259,10 @@ class Job: next_page_token: Optional[str] = None """A token that can be used to list the next page of array properties.""" + path: Optional[str] = None + """Path of the job object in workspace file tree, including file extension. If absent, the job + doesn't have a workspace object. Example: /Workspace/user@example.com/my_project/my_job.job.json""" + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. 
This value can be changed by setting the `run_as` field when creating or updating a @@ -2258,12 +2288,16 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2281,12 +2315,16 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.has_more is not None: body["has_more"] = self.has_more if self.job_id is not None: body["job_id"] = self.job_id if self.next_page_token is not None: body["next_page_token"] = self.next_page_token + if self.path is not None: + body["path"] = self.path if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.settings: @@ -2302,9 +2340,11 @@ def from_dict(cls, d: Dict[str, Any]) -> Job: created_time=d.get("created_time", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), has_more=d.get("has_more", None), job_id=d.get("job_id", None), next_page_token=d.get("next_page_token", None), + path=d.get("path", None), run_as_user_name=d.get("run_as_user_name", None), settings=_from_dict(d, "settings", JobSettings), trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), @@ -3007,6 +3047,10 @@ class JobSettings: parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" + parent_path: Optional[str] = None + """Path of the job parent folder in workspace file tree. If absent, the job doesn't have a + workspace object.""" + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -3049,8 +3093,8 @@ class JobSettings: usage_policy_id: Optional[str] = None """The id of the user specified usage policy to use for this job. If not specified, a default usage - policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for - the budget policy used by this workload.""" + policy may be applied when creating or modifying the job. 
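The distinction the corrected docstring draws — a requested `usage_policy_id` on `JobSettings` versus the applied `effective_usage_policy_id` on the job — can be inspected like this (the job ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
job = w.jobs.get(job_id=123)  # placeholder job ID
requested = job.settings.usage_policy_id if job.settings else None
print("requested:", requested, "applied:", job.effective_usage_policy_id)
```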
See `effective_usage_policy_id` for + the usage policy used by this workload.""" webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" @@ -3088,6 +3132,8 @@ def as_dict(self) -> dict: body["notification_settings"] = self.notification_settings.as_dict() if self.parameters: body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target.value if self.queue: @@ -3143,6 +3189,8 @@ def as_shallow_dict(self) -> dict: body["notification_settings"] = self.notification_settings if self.parameters: body["parameters"] = self.parameters + if self.parent_path is not None: + body["parent_path"] = self.parent_path if self.performance_target is not None: body["performance_target"] = self.performance_target if self.queue: @@ -3184,6 +3232,7 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSettings: name=d.get("name", None), notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), parameters=_repeated_dict(d, "parameters", JobParameterDefinition), + parent_path=d.get("parent_path", None), performance_target=_enum(d, "performance_target", PerformanceTarget), queue=_from_dict(d, "queue", QueueSettings), run_as=_from_dict(d, "run_as", JobRunAs), @@ -4591,6 +4640,9 @@ class Run: `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" + effective_usage_policy_id: Optional[str] = None + """The id of the usage policy used by this run for cost attribution purposes.""" + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" @@ -4717,6 +4769,8 @@ def as_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target.value + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -4792,6 +4846,8 @@ def as_shallow_dict(self) -> dict: body["description"] = self.description if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.end_time is not None: body["end_time"] = self.end_time if self.execution_duration is not None: @@ -4861,6 +4917,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Run: creator_user_name=d.get("creator_user_name", None), description=d.get("description", None), effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), end_time=d.get("end_time", None), execution_duration=d.get("execution_duration", None), git_source=_from_dict(d, "git_source", GitSource), @@ -5699,7 +5756,7 @@ class RunTask: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. 
- [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. @@ -5737,7 +5794,8 @@ class RunTask: """An optional description for this task.""" disabled: Optional[bool] = None - """Deprecated, field was never used in production.""" + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from @@ -5850,21 +5908,9 @@ class RunTask: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. - This task can run only on new clusters and is not compatible with serverless compute. - - In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use - `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark - configurations. - - `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you - _cannot_ specify them in parameters. - - By default, the Spark submit job uses all available memory (excluding reserved memory for - Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value - to leave some room for off-heap usage. - - The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" + """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. + Databricks recommends using the spark_jar_task instead; see [Spark Submit task for + jobs](/jobs/spark-submit).""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -6956,7 +7002,7 @@ class SubmitTask: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. - [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the @@ -6983,6 +7029,10 @@ class SubmitTask: description: Optional[str] = None """An optional description for this task.""" + disabled: Optional[bool] = None + """An optional flag to disable the task. If set to true, the task will not run even if it is part + of a job.""" + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" @@ -7043,21 +7093,9 @@ class SubmitTask: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. - This task can run only on new clusters and is not compatible with serverless compute. - - In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. 
Instead, use - `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark - configurations. - - `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you - _cannot_ specify them in parameters. - - By default, the Spark submit job uses all available memory (excluding reserved memory for - Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value - to leave some room for off-heap usage. - - The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" + """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. + Databricks recommends using the spark_jar_task instead; see [Spark Submit task for + jobs](/jobs/spark-submit).""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -7090,6 +7128,8 @@ def as_dict(self) -> dict: body["depends_on"] = [v.as_dict() for v in self.depends_on] if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications.as_dict() if self.environment_key is not None: @@ -7155,6 +7195,8 @@ def as_shallow_dict(self) -> dict: body["depends_on"] = self.depends_on if self.description is not None: body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled if self.email_notifications: body["email_notifications"] = self.email_notifications if self.environment_key is not None: @@ -7213,6 +7255,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), + disabled=d.get("disabled", None), email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), environment_key=d.get("environment_key", None), existing_cluster_id=d.get("existing_cluster_id", None), @@ -7447,7 +7490,7 @@ class Task: clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. - [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" + [clean rooms]: https://docs.databricks.com/clean-rooms/index.html""" condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the @@ -7564,21 +7607,9 @@ class Task: """The task runs a Python file when the `spark_python_task` field is present.""" spark_submit_task: Optional[SparkSubmitTask] = None - """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. - This task can run only on new clusters and is not compatible with serverless compute. - - In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use - `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark - configurations. - - `master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you - _cannot_ specify them in parameters. - - By default, the Spark submit job uses all available memory (excluding reserved memory for - Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value - to leave some room for off-heap usage. 
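Since the rewritten docstrings now steer users from `spark_submit_task` to `spark_jar_task`, a minimal sketch of the recommended shape (the class name and jar path are placeholders):

```python
from databricks.sdk.service import compute, jobs

task = jobs.Task(
    task_key="jar_main",
    spark_jar_task=jobs.SparkJarTask(
        main_class_name="com.example.Main",  # placeholder entry point
        parameters=["--date", "2025-01-01"],
    ),
    libraries=[compute.Library(jar="dbfs:/jars/app.jar")],  # placeholder jar
)
```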
- - The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" + """(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. + Databricks recommends using the spark_jar_task instead; see [Spark Submit task for + jobs](/jobs/spark-submit).""" sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when @@ -7995,6 +8026,8 @@ class TerminationCodeCode(Enum): run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. + * `BREAKING_CHANGE`: Run failed because of an intentional breaking change in Spark, but it will + be retried with a mitigation config. [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" @@ -8547,6 +8580,7 @@ def create( name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, + parent_path: Optional[str] = None, performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, @@ -8618,6 +8652,9 @@ def create( `email_notifications` and `webhook_notifications` for this job. :param parameters: List[:class:`JobParameterDefinition`] (optional) Job-level parameter definitions + :param parent_path: str (optional) + Path of the job parent folder in workspace file tree. If absent, the job doesn't have a workspace + object. :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -8652,8 +8689,8 @@ def create( `runNow`. :param usage_policy_id: str (optional) The id of the user specified usage policy to use for this job. If not specified, a default usage - policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the - budget policy used by this workload. + policy may be applied when creating or modifying the job. See `effective_usage_policy_id` for the + usage policy used by this workload. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. 
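Putting the new `parent_path` argument together with a task list, a hedged sketch of `jobs.create` (folder and notebook paths are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
created = w.jobs.create(
    name="nightly-etl",
    parent_path="/Workspace/Users/someone@example.com/etl",  # placeholder folder
    tasks=[
        jobs.Task(
            task_key="main",
            notebook_task=jobs.NotebookTask(
                notebook_path="/Workspace/Users/someone@example.com/etl/main"  # placeholder
            ),
        )
    ],
)
print(created.job_id)
```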
@@ -8692,6 +8729,8 @@ def create( body["notification_settings"] = notification_settings.as_dict() if parameters is not None: body["parameters"] = [v.as_dict() for v in parameters] + if parent_path is not None: + body["parent_path"] = parent_path if performance_target is not None: body["performance_target"] = performance_target.value if queue is not None: diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index e83b3dd6f..688afd4c3 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -512,6 +512,30 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateWebhookResponse: return cls(webhook=_from_dict(d, "webhook", RegistryWebhook)) +@dataclass +class DataSource: + delta_table_source: Optional[DeltaTableSource] = None + + def as_dict(self) -> dict: + """Serializes the DataSource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.delta_table_source: + body["delta_table_source"] = self.delta_table_source.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_table_source: + body["delta_table_source"] = self.delta_table_source + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataSource: + """Deserializes the DataSource from a dictionary.""" + return cls(delta_table_source=_from_dict(d, "delta_table_source", DeltaTableSource)) + + @dataclass class Dataset: """Dataset. Represents a reference to data used for training, testing, or evaluation during the @@ -868,6 +892,49 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteWebhookResponse: return cls() +@dataclass +class DeltaTableSource: + full_name: str + """The full three-part (catalog, schema, table) name of the Delta table.""" + + entity_columns: List[str] + """The entity columns of the Delta table.""" + + timeseries_column: str + """The timeseries column of the Delta table.""" + + def as_dict(self) -> dict: + """Serializes the DeltaTableSource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_columns: + body["entity_columns"] = [v for v in self.entity_columns] + if self.full_name is not None: + body["full_name"] = self.full_name + if self.timeseries_column is not None: + body["timeseries_column"] = self.timeseries_column + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeltaTableSource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_columns: + body["entity_columns"] = self.entity_columns + if self.full_name is not None: + body["full_name"] = self.full_name + if self.timeseries_column is not None: + body["timeseries_column"] = self.timeseries_column + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSource: + """Deserializes the DeltaTableSource from a dictionary.""" + return cls( + entity_columns=d.get("entity_columns", None), + full_name=d.get("full_name", None), + timeseries_column=d.get("timeseries_column", None), + ) + + @dataclass class Experiment: """An experiment and its metadata.""" @@ -1212,46 +1279,68 @@ def from_dict(cls, d: Dict[str, Any]) -> ExperimentTag: @dataclass class Feature: - """Feature for model version.""" + full_name: str + """The full three-part name (catalog, schema, name) of the feature.""" - feature_name: Optional[str] = None - """Feature name""" + source: DataSource + """The data source of the feature.""" - feature_table_id: Optional[str] = None - """Feature 
table id""" + inputs: List[str] + """The input columns from which the feature is computed.""" - feature_table_name: Optional[str] = None - """Feature table name""" + function: Function + """The function by which the feature is computed.""" + + time_window: TimeWindow + """The time window in which the feature is computed.""" + + description: Optional[str] = None + """The description of the feature.""" def as_dict(self) -> dict: """Serializes the Feature into a dictionary suitable for use as a JSON request body.""" body = {} - if self.feature_name is not None: - body["feature_name"] = self.feature_name - if self.feature_table_id is not None: - body["feature_table_id"] = self.feature_table_id - if self.feature_table_name is not None: - body["feature_table_name"] = self.feature_table_name + if self.description is not None: + body["description"] = self.description + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function: + body["function"] = self.function.as_dict() + if self.inputs: + body["inputs"] = [v for v in self.inputs] + if self.source: + body["source"] = self.source.as_dict() + if self.time_window: + body["time_window"] = self.time_window.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Feature into a shallow dictionary of its immediate attributes.""" body = {} - if self.feature_name is not None: - body["feature_name"] = self.feature_name - if self.feature_table_id is not None: - body["feature_table_id"] = self.feature_table_id - if self.feature_table_name is not None: - body["feature_table_name"] = self.feature_table_name + if self.description is not None: + body["description"] = self.description + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function: + body["function"] = self.function + if self.inputs: + body["inputs"] = self.inputs + if self.source: + body["source"] = self.source + if self.time_window: + body["time_window"] = self.time_window return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Feature: """Deserializes the Feature from a dictionary.""" return cls( - feature_name=d.get("feature_name", None), - feature_table_id=d.get("feature_table_id", None), - feature_table_name=d.get("feature_table_name", None), + description=d.get("description", None), + full_name=d.get("full_name", None), + function=_from_dict(d, "function", Function), + inputs=d.get("inputs", None), + source=_from_dict(d, "source", DataSource), + time_window=_from_dict(d, "time_window", TimeWindow), ) @@ -1391,7 +1480,7 @@ def from_dict(cls, d: Dict[str, Any]) -> FeatureLineageOnlineFeature: class FeatureList: """Feature list wrap all the features for a model version""" - features: Optional[List[Feature]] = None + features: Optional[List[LinkedFeature]] = None def as_dict(self) -> dict: """Serializes the FeatureList into a dictionary suitable for use as a JSON request body.""" @@ -1410,7 +1499,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> FeatureList: """Deserializes the FeatureList from a dictionary.""" - return cls(features=_repeated_dict(d, "features", Feature)) + return cls(features=_repeated_dict(d, "features", LinkedFeature)) @dataclass @@ -1565,6 +1654,90 @@ class ForecastingExperimentState(Enum): SUCCEEDED = "SUCCEEDED" +@dataclass +class Function: + function_type: FunctionFunctionType + """The type of the function.""" + + extra_parameters: Optional[List[FunctionExtraParameter]] = None + """Extra parameters for parameterized functions.""" + + def 
as_dict(self) -> dict: + """Serializes the Function into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.extra_parameters: + body["extra_parameters"] = [v.as_dict() for v in self.extra_parameters] + if self.function_type is not None: + body["function_type"] = self.function_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Function into a shallow dictionary of its immediate attributes.""" + body = {} + if self.extra_parameters: + body["extra_parameters"] = self.extra_parameters + if self.function_type is not None: + body["function_type"] = self.function_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Function: + """Deserializes the Function from a dictionary.""" + return cls( + extra_parameters=_repeated_dict(d, "extra_parameters", FunctionExtraParameter), + function_type=_enum(d, "function_type", FunctionFunctionType), + ) + + +@dataclass +class FunctionExtraParameter: + key: str + """The name of the parameter.""" + + value: str + """The value of the parameter.""" + + def as_dict(self) -> dict: + """Serializes the FunctionExtraParameter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FunctionExtraParameter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FunctionExtraParameter: + """Deserializes the FunctionExtraParameter from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + +class FunctionFunctionType(Enum): + + APPROX_COUNT_DISTINCT = "APPROX_COUNT_DISTINCT" + APPROX_PERCENTILE = "APPROX_PERCENTILE" + AVG = "AVG" + COUNT = "COUNT" + FIRST = "FIRST" + LAST = "LAST" + MAX = "MAX" + MIN = "MIN" + STDDEV_POP = "STDDEV_POP" + STDDEV_SAMP = "STDDEV_SAMP" + SUM = "SUM" + VAR_POP = "VAR_POP" + VAR_SAMP = "VAR_SAMP" + + @dataclass class GetExperimentByNameResponse: experiment: Optional[Experiment] = None @@ -2079,6 +2252,51 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSpecWithoutSecret: return cls(job_id=d.get("job_id", None), workspace_url=d.get("workspace_url", None)) +@dataclass +class LinkedFeature: + """Feature for model version. 
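For the `Function` and `FunctionExtraParameter` types added above, a small construction sketch; `APPROX_PERCENTILE` is a parameterized function, and the expected parameter key is an assumption since the patch does not document the supported keys:

```python
from databricks.sdk.service.ml import (Function, FunctionExtraParameter,
                                       FunctionFunctionType)

# Parameterized aggregation: the percentile is passed via extra_parameters.
fn = Function(
    function_type=FunctionFunctionType.APPROX_PERCENTILE,
    extra_parameters=[FunctionExtraParameter(key="percentile", value="0.95")],  # key name is an assumption
)
print(fn.as_dict())
```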
([ML-57150] Renamed from Feature to LinkedFeature)""" + + feature_name: Optional[str] = None + """Feature name""" + + feature_table_id: Optional[str] = None + """Feature table id""" + + feature_table_name: Optional[str] = None + """Feature table name""" + + def as_dict(self) -> dict: + """Serializes the LinkedFeature into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the LinkedFeature into a shallow dictionary of its immediate attributes.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> LinkedFeature: + """Deserializes the LinkedFeature from a dictionary.""" + return cls( + feature_name=d.get("feature_name", None), + feature_table_id=d.get("feature_table_id", None), + feature_table_name=d.get("feature_table_name", None), + ) + + @dataclass class ListArtifactsResponse: files: Optional[List[FileInfo]] = None @@ -2192,6 +2410,38 @@ def from_dict(cls, d: Dict[str, Any]) -> ListFeatureTagsResponse: ) +@dataclass +class ListFeaturesResponse: + features: Optional[List[Feature]] = None + """List of features.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of results for this query.""" + + def as_dict(self) -> dict: + """Serializes the ListFeaturesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.features: + body["features"] = [v.as_dict() for v in self.features] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListFeaturesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.features: + body["features"] = self.features + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListFeaturesResponse: + """Deserializes the ListFeaturesResponse from a dictionary.""" + return cls(features=_repeated_dict(d, "features", Feature), next_page_token=d.get("next_page_token", None)) + + @dataclass class ListModelsResponse: next_page_token: Optional[str] = None @@ -3519,10 +3769,8 @@ class PublishSpec: online_table_name: str """The full three-part (catalog, schema, table) name of the online table.""" - publish_mode: Optional[PublishSpecPublishMode] = None - """The publish mode of the pipeline that syncs the online table with the source table. Defaults to - TRIGGERED if not specified. 
All publish modes require the source table to have Change Data Feed - (CDF) enabled.""" + publish_mode: PublishSpecPublishMode + """The publish mode of the pipeline that syncs the online table with the source table.""" def as_dict(self) -> dict: """Serializes the PublishSpec into a dictionary suitable for use as a JSON request body.""" @@ -3559,6 +3807,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishSpec: class PublishSpecPublishMode(Enum): CONTINUOUS = "CONTINUOUS" + SNAPSHOT = "SNAPSHOT" TRIGGERED = "TRIGGERED" @@ -4760,6 +5009,38 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: return cls(body=d.get("body", None), status_code=d.get("status_code", None)) +@dataclass +class TimeWindow: + duration: str + """The duration of the time window.""" + + offset: Optional[str] = None + """The offset of the time window.""" + + def as_dict(self) -> dict: + """Serializes the TimeWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.duration is not None: + body["duration"] = self.duration + if self.offset is not None: + body["offset"] = self.offset + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TimeWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.duration is not None: + body["duration"] = self.duration + if self.offset is not None: + body["offset"] = self.offset + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TimeWindow: + """Deserializes the TimeWindow from a dictionary.""" + return cls(duration=d.get("duration", None), offset=d.get("offset", None)) + + @dataclass class TransitionRequest: """For activities, this contains the activity recorded for the action. For comments, this contains @@ -6301,6 +6582,116 @@ def update_run( return UpdateRunResponse.from_dict(res) +class FeatureEngineeringAPI: + """[description]""" + + def __init__(self, api_client): + self._api = api_client + + def create_feature(self, feature: Feature) -> Feature: + """Create a Feature. + + :param feature: :class:`Feature` + Feature to create. + + :returns: :class:`Feature` + """ + body = feature.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/feature-engineering/features", body=body, headers=headers) + return Feature.from_dict(res) + + def delete_feature(self, full_name: str): + """Delete a Feature. + + :param full_name: str + Name of the feature to delete. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers) + + def get_feature(self, full_name: str) -> Feature: + """Get a Feature. + + :param full_name: str + Name of the feature to get. + + :returns: :class:`Feature` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/feature-engineering/features/{full_name}", headers=headers) + return Feature.from_dict(res) + + def list_features(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Feature]: + """List Features. + + :param page_size: int (optional) + The maximum number of results to return. + :param page_token: str (optional) + Pagination token to go to the next page based on a previous query. 
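An end-to-end sketch of the new `FeatureEngineeringAPI` shown here. The service is constructed directly against the API client (a workspace-client accessor is not part of this hunk), and all table, column, and feature names are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import (DataSource, DeltaTableSource, Feature,
                                       FeatureEngineeringAPI, Function,
                                       FunctionFunctionType, TimeWindow)

w = WorkspaceClient()
fe = FeatureEngineeringAPI(w.api_client)  # direct construction; accessor name is an assumption

feature = Feature(
    full_name="main.features.txn_count_7d",      # placeholder three-part name
    source=DataSource(
        delta_table_source=DeltaTableSource(
            full_name="main.raw.transactions",   # placeholder source table
            entity_columns=["user_id"],
            timeseries_column="event_ts",
        )
    ),
    inputs=["amount"],
    function=Function(function_type=FunctionFunctionType.COUNT),
    time_window=TimeWindow(duration="7d"),       # duration format is an assumption
)
created = fe.create_feature(feature=feature)

# list_features pages through `next_page_token` internally and yields Feature objects.
for f in fe.list_features(page_size=100):
    print(f.full_name)
```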
+ + :returns: Iterator over :class:`Feature` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/feature-engineering/features", query=query, headers=headers) + if "features" in json: + for v in json["features"]: + yield Feature.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_feature(self, full_name: str, feature: Feature, update_mask: str) -> Feature: + """Update a Feature. + + :param full_name: str + The full three-part name (catalog, schema, name) of the feature. + :param feature: :class:`Feature` + Feature to update. + :param update_mask: str + The list of fields to update. + + :returns: :class:`Feature` + """ + body = feature.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/feature-engineering/features/{full_name}", query=query, body=body, headers=headers + ) + return Feature.from_dict(res) + + class FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features. Using a feature store also ensures that the code used to compute feature values is the same during model diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 148374800..d8d236711 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -232,11 +232,11 @@ class FederationPolicy: oidc_policy: Optional[OidcFederationPolicy] = None policy_id: Optional[str] = None - """The ID of the federation policy.""" + """The ID of the federation policy. Output only.""" service_principal_id: Optional[int] = None - """The service principal ID that this federation policy applies to. Only set for service principal - federation policies.""" + """The service principal ID that this federation policy applies to. Output only. 
Only set for + service principal federation policies.""" uid: Optional[str] = None """Unique, immutable id of the federation policy.""" diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index d723956e0..d34c23448 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -21,6 +21,24 @@ # all definitions in this file are in alphabetical order +@dataclass +class ApplyEnvironmentRequestResponse: + def as_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ApplyEnvironmentRequestResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ApplyEnvironmentRequestResponse: + """Deserializes the ApplyEnvironmentRequestResponse from a dictionary.""" + return cls() + + @dataclass class ConnectionParameters: source_catalog: Optional[str] = None @@ -378,6 +396,9 @@ class GetPipelineResponse: effective_budget_policy_id: Optional[str] = None """Serverless budget policy ID of this pipeline.""" + effective_usage_policy_id: Optional[str] = None + """Serverless usage policy ID of the pipeline.""" + health: Optional[GetPipelineResponseHealth] = None """The health of a pipeline.""" @@ -418,6 +439,8 @@ def as_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health.value if self.last_modified is not None: @@ -449,6 +472,8 @@ def as_shallow_dict(self) -> dict: body["creator_user_name"] = self.creator_user_name if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.health is not None: body["health"] = self.health if self.last_modified is not None: @@ -477,6 +502,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse: cluster_id=d.get("cluster_id", None), creator_user_name=d.get("creator_user_name", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), health=_enum(d, "health", GetPipelineResponseHealth), last_modified=d.get("last_modified", None), latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), @@ -650,6 +676,10 @@ class IngestionPipelineDefinition: """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.""" + netsuite_jar_path: Optional[str] = None + """Netsuite only configuration. When the field is set for a netsuite connector, the jar stored in + the field will be validated and added to the classpath of pipeline's cluster.""" + objects: Optional[List[IngestionConfig]] = None """Required. 
Settings specifying tables to replicate and the destination for the replicated tables."""
 
@@ -673,6 +703,8 @@ def as_dict(self) -> dict:
             body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog
         if self.ingestion_gateway_id is not None:
             body["ingestion_gateway_id"] = self.ingestion_gateway_id
+        if self.netsuite_jar_path is not None:
+            body["netsuite_jar_path"] = self.netsuite_jar_path
         if self.objects:
             body["objects"] = [v.as_dict() for v in self.objects]
         if self.source_configurations:
@@ -692,6 +724,8 @@ def as_shallow_dict(self) -> dict:
             body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog
         if self.ingestion_gateway_id is not None:
             body["ingestion_gateway_id"] = self.ingestion_gateway_id
+        if self.netsuite_jar_path is not None:
+            body["netsuite_jar_path"] = self.netsuite_jar_path
         if self.objects:
             body["objects"] = self.objects
         if self.source_configurations:
@@ -709,6 +743,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition:
             connection_name=d.get("connection_name", None),
             ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None),
             ingestion_gateway_id=d.get("ingestion_gateway_id", None),
+            netsuite_jar_path=d.get("netsuite_jar_path", None),
             objects=_repeated_dict(d, "objects", IngestionConfig),
             source_configurations=_repeated_dict(d, "source_configurations", SourceConfig),
             source_type=_enum(d, "source_type", IngestionSourceType),
@@ -775,11 +810,97 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionTableSpecifi
     )
 
 
+@dataclass
+class IngestionPipelineDefinitionWorkdayReportParameters:
+    incremental: Optional[bool] = None
+    """(Optional) Marks the report as incremental. This field is deprecated and should not be used. Use
+    `parameters` instead. The incremental behavior is now controlled by the `parameters` field."""
+
+    parameters: Optional[Dict[str, str]] = None
+    """Parameters for the Workday report. Each key represents the parameter name (e.g., "start_date",
+    "end_date"), and the corresponding value is a SQL-like expression used to compute the parameter
+    value at runtime. Example: { "start_date": "{ coalesce(current_offset(), date(\"2025-02-01\"))
+    }", "end_date": "{ current_date() - INTERVAL 1 DAY }" }"""
+
+    report_parameters: Optional[List[IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue]] = None
+    """(Optional) Additional custom parameters for Workday Report. This field is deprecated and should
+    not be used.
Use `parameters` instead.""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionWorkdayReportParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.incremental is not None: + body["incremental"] = self.incremental + if self.parameters: + body["parameters"] = self.parameters + if self.report_parameters: + body["report_parameters"] = [v.as_dict() for v in self.report_parameters] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionWorkdayReportParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.incremental is not None: + body["incremental"] = self.incremental + if self.parameters: + body["parameters"] = self.parameters + if self.report_parameters: + body["report_parameters"] = self.report_parameters + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayReportParameters: + """Deserializes the IngestionPipelineDefinitionWorkdayReportParameters from a dictionary.""" + return cls( + incremental=d.get("incremental", None), + parameters=d.get("parameters", None), + report_parameters=_repeated_dict( + d, "report_parameters", IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue + ), + ) + + +@dataclass +class IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: + key: Optional[str] = None + """Key for the report parameter, can be a column name or other metadata""" + + value: Optional[str] = None + """Value for the report parameter. Possible values it can take are these sql functions: 1. + coalesce(current_offset(), date("YYYY-MM-DD")) -> if current_offset() is null, then the passed + date, else current_offset() 2. current_date() 3. date_sub(current_date(), x) -> subtract x (some + non-negative integer) days from current date""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: + """Deserializes the IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + class IngestionSourceType(Enum): BIGQUERY = "BIGQUERY" CONFLUENCE = "CONFLUENCE" DYNAMICS365 = "DYNAMICS365" + FOREIGN_CATALOG = "FOREIGN_CATALOG" GA4_RAW_DATA = "GA4_RAW_DATA" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" META_MARKETING = "META_MARKETING" @@ -1026,6 +1147,9 @@ class Origin: flow_name: Optional[str] = None """The name of the flow. 
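# Editorial example (not part of the patch): building the new Workday report
# parameters; the expressions mirror the examples given in the docstring above.
from databricks.sdk.service.pipelines import IngestionPipelineDefinitionWorkdayReportParameters

params = IngestionPipelineDefinitionWorkdayReportParameters(
    parameters={
        "start_date": '{ coalesce(current_offset(), date("2025-02-01")) }',
        "end_date": "{ current_date() - INTERVAL 1 DAY }",
    }
)
assert params.as_dict()["parameters"]["start_date"].startswith("{")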
Not unique.""" + graph_id: Optional[str] = None + """The UUID of the graph associated with this event, corresponding to a GRAPH_UPDATED event.""" + host: Optional[str] = None """The optional host name where the event was triggered""" @@ -1074,6 +1198,8 @@ def as_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host if self.maintenance_id is not None: @@ -1113,6 +1239,8 @@ def as_shallow_dict(self) -> dict: body["flow_id"] = self.flow_id if self.flow_name is not None: body["flow_name"] = self.flow_name + if self.graph_id is not None: + body["graph_id"] = self.graph_id if self.host is not None: body["host"] = self.host if self.maintenance_id is not None: @@ -1147,6 +1275,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Origin: dataset_name=d.get("dataset_name", None), flow_id=d.get("flow_id", None), flow_name=d.get("flow_name", None), + graph_id=d.get("graph_id", None), host=d.get("host", None), maintenance_id=d.get("maintenance_id", None), materialization_name=d.get("materialization_name", None), @@ -1973,6 +2102,9 @@ class PipelineSpec: trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" + usage_policy_id: Optional[str] = None + """Usage policy of this pipeline.""" + def as_dict(self) -> dict: """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2030,6 +2162,8 @@ def as_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: @@ -2089,6 +2223,8 @@ def as_shallow_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body @classmethod @@ -2122,6 +2258,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec: tags=d.get("tags", None), target=d.get("target", None), trigger=_from_dict(d, "trigger", PipelineTrigger), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -2938,6 +3075,9 @@ class TableSpecificConfig: """The column names specifying the logical order of events in the source data. 
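# Editorial example (not part of the patch): the new usage_policy_id is emitted
# only when set, mirroring the budget-policy field; the ID is hypothetical and no
# other PipelineSpec fields are assumed to be set.
from databricks.sdk.service.pipelines import PipelineSpec

spec = PipelineSpec(usage_policy_id="up-123")
assert spec.as_dict() == {"usage_policy_id": "up-123"}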
Delta Live Tables uses this sequencing to handle change events that arrive out of order.""" + workday_report_parameters: Optional[IngestionPipelineDefinitionWorkdayReportParameters] = None + """(Optional) Additional custom parameters for Workday Report""" + def as_dict(self) -> dict: """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2957,6 +3097,8 @@ def as_dict(self) -> dict: body["scd_type"] = self.scd_type.value if self.sequence_by: body["sequence_by"] = [v for v in self.sequence_by] + if self.workday_report_parameters: + body["workday_report_parameters"] = self.workday_report_parameters.as_dict() return body def as_shallow_dict(self) -> dict: @@ -2978,6 +3120,8 @@ def as_shallow_dict(self) -> dict: body["scd_type"] = self.scd_type if self.sequence_by: body["sequence_by"] = self.sequence_by + if self.workday_report_parameters: + body["workday_report_parameters"] = self.workday_report_parameters return body @classmethod @@ -2996,6 +3140,9 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None), scd_type=_enum(d, "scd_type", TableSpecificConfigScdType), sequence_by=d.get("sequence_by", None), + workday_report_parameters=_from_dict( + d, "workday_report_parameters", IngestionPipelineDefinitionWorkdayReportParameters + ), ) @@ -3030,6 +3177,10 @@ class UpdateInfo: full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means that the states of the table will be reset before the refresh.""" + mode: Optional[UpdateMode] = None + """Indicates whether the update is either part of a continuous job run, or running in legacy + continuous pipeline mode.""" + pipeline_id: Optional[str] = None """The ID of the pipeline.""" @@ -3063,6 +3214,8 @@ def as_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = [v for v in self.full_refresh_selection] + if self.mode is not None: + body["mode"] = self.mode.value if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id if self.refresh_selection: @@ -3090,6 +3243,8 @@ def as_shallow_dict(self) -> dict: body["full_refresh"] = self.full_refresh if self.full_refresh_selection: body["full_refresh_selection"] = self.full_refresh_selection + if self.mode is not None: + body["mode"] = self.mode if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id if self.refresh_selection: @@ -3112,6 +3267,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateInfo: creation_time=d.get("creation_time", None), full_refresh=d.get("full_refresh", None), full_refresh_selection=d.get("full_refresh_selection", None), + mode=_enum(d, "mode", UpdateMode), pipeline_id=d.get("pipeline_id", None), refresh_selection=d.get("refresh_selection", None), state=_enum(d, "state", UpdateInfoState), @@ -3148,6 +3304,12 @@ class UpdateInfoState(Enum): WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" +class UpdateMode(Enum): + + CONTINUOUS = "CONTINUOUS" + DEFAULT = "DEFAULT" + + @dataclass class UpdateStateInfo: creation_time: Optional[str] = None @@ -3251,6 +3413,22 @@ def wait_get_pipeline_idle( attempt += 1 raise TimeoutError(f"timed out after {timeout}: {status_message}") + def apply_environment(self, pipeline_id: str) -> ApplyEnvironmentRequestResponse: + """* Applies the current pipeline environment onto the pipeline compute. The environment applied can be + used by subsequent dev-mode updates. 
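# Editorial example (not part of the patch): invoking the new environment-apply
# endpoint and passing the new usage_policy_id at creation time. Assumes an
# authenticated WorkspaceClient; the pipeline and policy IDs are hypothetical.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.pipelines.apply_environment(pipeline_id="1234-5678-9abc")
created = w.pipelines.create(continuous=False, usage_policy_id="up-123")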
+ + :param pipeline_id: str + + :returns: :class:`ApplyEnvironmentRequestResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/environment/apply", headers=headers) + return ApplyEnvironmentRequestResponse.from_dict(res) + def create( self, *, @@ -3284,6 +3462,7 @@ def create( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ) -> CreatePipelineResponse: """Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. @@ -3354,6 +3533,8 @@ def create( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. :returns: :class:`CreatePipelineResponse` """ @@ -3418,6 +3599,8 @@ def create( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -3800,6 +3983,7 @@ def update( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ): """Updates a pipeline with the supplied configuration. @@ -3873,6 +4057,8 @@ def update( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. """ @@ -3937,6 +4123,8 @@ def update( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 8e34b28f0..985c7cc9b 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -43,21 +43,98 @@ def from_dict(cls, d: Dict[str, Any]) -> AwsCredentials: return cls(sts_role=_from_dict(d, "sts_role", StsRole)) +@dataclass +class AwsDbManagedNetworkExtraInfo: + dhcp_options_id: Optional[str] = None + """This field is need to populate worker env for DB managed VPC. It is likely only for resource + tracking/deletion purpose.""" + + gateway_id: Optional[str] = None + """This is the internal gateway which is different from the NAT gateway in the NPIP VPC Infra. 
It
+    is likely only for resource tracking/deletion purpose."""
+
+    managed_security_group: Optional[str] = None
+    """Security group which the Vault will control, ensuring that worker_opened_ports are actually
+    open."""
+
+    npip_vpc_infra: Optional[NpipVpcInfra] = None
+    """Resources description for no public IP shard environment."""
+
+    unmanaged_security_group: Optional[str] = None
+    """Security group which is given to the user to manage without Databricks interference."""
+
+    worker_key_contents: Optional[str] = None
+    """Contents of the secret key which gives ssh access to the workers."""
+
+    worker_keypair_name: Optional[str] = None
+    """Name of the keypair in AWS which allows sshing into the workers."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsDbManagedNetworkExtraInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dhcp_options_id is not None:
+            body["dhcp_options_id"] = self.dhcp_options_id
+        if self.gateway_id is not None:
+            body["gateway_id"] = self.gateway_id
+        if self.managed_security_group is not None:
+            body["managed_security_group"] = self.managed_security_group
+        if self.npip_vpc_infra:
+            body["npipVpcInfra"] = self.npip_vpc_infra.as_dict()
+        if self.unmanaged_security_group is not None:
+            body["unmanaged_security_group"] = self.unmanaged_security_group
+        if self.worker_key_contents is not None:
+            body["worker_key_contents"] = self.worker_key_contents
+        if self.worker_keypair_name is not None:
+            body["worker_keypair_name"] = self.worker_keypair_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsDbManagedNetworkExtraInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dhcp_options_id is not None:
+            body["dhcp_options_id"] = self.dhcp_options_id
+        if self.gateway_id is not None:
+            body["gateway_id"] = self.gateway_id
+        if self.managed_security_group is not None:
+            body["managed_security_group"] = self.managed_security_group
+        if self.npip_vpc_infra:
+            body["npipVpcInfra"] = self.npip_vpc_infra
+        if self.unmanaged_security_group is not None:
+            body["unmanaged_security_group"] = self.unmanaged_security_group
+        if self.worker_key_contents is not None:
+            body["worker_key_contents"] = self.worker_key_contents
+        if self.worker_keypair_name is not None:
+            body["worker_keypair_name"] = self.worker_keypair_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AwsDbManagedNetworkExtraInfo:
+        """Deserializes the AwsDbManagedNetworkExtraInfo from a dictionary."""
+        return cls(
+            dhcp_options_id=d.get("dhcp_options_id", None),
+            gateway_id=d.get("gateway_id", None),
+            managed_security_group=d.get("managed_security_group", None),
+            npip_vpc_infra=_from_dict(d, "npipVpcInfra", NpipVpcInfra),
+            unmanaged_security_group=d.get("unmanaged_security_group", None),
+            worker_key_contents=d.get("worker_key_contents", None),
+            worker_keypair_name=d.get("worker_keypair_name", None),
+        )
+
+
 @dataclass
 class AwsKeyInfo:
     key_arn: str
-    """The AWS KMS key's Amazon Resource Name (ARN)."""
+    """The ARN of the KMS key."""
 
     key_region: str
-    """The AWS KMS key region."""
+    """The region of the KMS key."""
 
     key_alias: Optional[str] = None
-    """The AWS KMS key alias."""
+    """The alias name of the KMS key."""
 
     reuse_key_for_cluster_volumes: Optional[bool] = None
-    """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true`
-    or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this
-    key for encrypting EBS volumes, set to `false`."""
+    """Indicates if the key should be used for cluster volumes. Can only be set if the CMK can be used
+    as a data plane key (use case storage)"""
 
     def as_dict(self) -> dict:
         """Serializes the AwsKeyInfo into a dictionary suitable for use as a JSON request body."""
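# Editorial example (not part of the patch): note the mixed key casing above,
# where npip_vpc_infra serializes as "npipVpcInfra" while its sibling fields stay
# snake_case; the IDs are hypothetical.
from databricks.sdk.service.provisioning import AwsDbManagedNetworkExtraInfo, NpipVpcInfra

extra = AwsDbManagedNetworkExtraInfo(
    dhcp_options_id="dopt-0123456789abcdef0",
    npip_vpc_infra=NpipVpcInfra(nat_gateway_id="nat-0ae5b2f027fe7221a"),
)
body = extra.as_dict()
assert "npipVpcInfra" in body and "dhcp_options_id" in body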
@@ -96,6 +173,150 @@ def from_dict(cls, d: Dict[str, Any]) -> AwsKeyInfo:
     )
 
 
+@dataclass
+class AwsNetworkInfo:
+    db_managed_vpc_extra_info: Optional[AwsDbManagedNetworkExtraInfo] = None
+    """Additional information for DB managed VPC, which is mainly used to populate WorkerEnvironment."""
+
+    security_group_ids: Optional[List[str]] = None
+    """The cloud-provided Security Group IDs that will determine ingress and egress rules for Cluster
+    nodes."""
+
+    subnet_ids: Optional[List[str]] = None
+    """The cloud-provided Subnet IDs that will be available to Clusters in Workspaces using this
+    Network."""
+
+    subnets: Optional[List[SubnetInfo]] = None
+    """Detailed information about each individual subnet, including availability_zone and
+    address_space. This field is populated during workspace creation and used for WorkerEnvironment."""
+
+    vpc_address_space: Optional[str] = None
+    """CIDR that is used for routing tables and security groups. Example: 10.0.0.0/16. CIDR blocks can
+    now be inferred from instance metadata during setup, so theoretically it is no longer necessary
+    to populate the `vpcAddressSpace` field. But there is an unknown bug which causes errors when
+    listing existing clusters and prevents customers from creating new clusters under the workspace
+    `Compute` page. This field is populated during workspace creation and used for
+    WorkerEnvironment."""
+
+    vpc_id: Optional[str] = None
+    """The cloud-provided VPC ID."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsNetworkInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.db_managed_vpc_extra_info:
+            body["db_managed_vpc_extra_info"] = self.db_managed_vpc_extra_info.as_dict()
+        if self.security_group_ids:
+            body["security_group_ids"] = [v for v in self.security_group_ids]
+        if self.subnet_ids:
+            body["subnet_ids"] = [v for v in self.subnet_ids]
+        if self.subnets:
+            body["subnets"] = [v.as_dict() for v in self.subnets]
+        if self.vpc_address_space is not None:
+            body["vpc_address_space"] = self.vpc_address_space
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsNetworkInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.db_managed_vpc_extra_info:
+            body["db_managed_vpc_extra_info"] = self.db_managed_vpc_extra_info
+        if self.security_group_ids:
+            body["security_group_ids"] = self.security_group_ids
+        if self.subnet_ids:
+            body["subnet_ids"] = self.subnet_ids
+        if self.subnets:
+            body["subnets"] = self.subnets
+        if self.vpc_address_space is not None:
+            body["vpc_address_space"] = self.vpc_address_space
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AwsNetworkInfo:
+        """Deserializes the AwsNetworkInfo from a dictionary."""
+        return cls(
+            db_managed_vpc_extra_info=_from_dict(d, "db_managed_vpc_extra_info", AwsDbManagedNetworkExtraInfo),
+            security_group_ids=d.get("security_group_ids", None),
+            subnet_ids=d.get("subnet_ids", None),
+            subnets=_repeated_dict(d, "subnets", SubnetInfo),
+            vpc_address_space=d.get("vpc_address_space", None),
+            vpc_id=d.get("vpc_id", None),
+        )
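# Editorial example (not part of the patch): AwsNetworkInfo nests the DB-managed
# VPC extras and the per-subnet details defined above; the IDs are hypothetical.
from databricks.sdk.service.provisioning import AwsNetworkInfo, SubnetInfo

net = AwsNetworkInfo(
    vpc_id="vpc-0a1b2c3d4e5f6a7b8",
    subnet_ids=["subnet-0f6f001e243e00c10"],
    subnets=[SubnetInfo(availability_zone="us-west-2a", subnet_address_space="10.0.0.0/17")],
)
assert AwsNetworkInfo.from_dict(net.as_dict()).subnets[0].availability_zone == "us-west-2a"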
+
+
+@dataclass
+class AzureKeyInfo:
+    disk_encryption_set_id: Optional[str] = None
+    """The Disk Encryption Set id that is used to represent the key info used for the Managed Disk
+    BYOK use case."""
+
+    key_access_configuration: Optional[KeyAccessConfiguration] = None
+    """The structure to store the key access credential. This is set if the Managed Identity is being
+    used to access the Azure Key Vault key."""
+
+    key_name: Optional[str] = None
+    """The name of the key in KeyVault."""
+
+    key_vault_uri: Optional[str] = None
+    """The base URI of the KeyVault."""
+
+    tenant_id: Optional[str] = None
+    """The tenant id where the KeyVault lives."""
+
+    version: Optional[str] = None
+    """The current key version."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureKeyInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disk_encryption_set_id is not None:
+            body["disk_encryption_set_id"] = self.disk_encryption_set_id
+        if self.key_access_configuration:
+            body["key_access_configuration"] = self.key_access_configuration.as_dict()
+        if self.key_name is not None:
+            body["key_name"] = self.key_name
+        if self.key_vault_uri is not None:
+            body["key_vault_uri"] = self.key_vault_uri
+        if self.tenant_id is not None:
+            body["tenant_id"] = self.tenant_id
+        if self.version is not None:
+            body["version"] = self.version
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disk_encryption_set_id is not None:
+            body["disk_encryption_set_id"] = self.disk_encryption_set_id
+        if self.key_access_configuration:
+            body["key_access_configuration"] = self.key_access_configuration
+        if self.key_name is not None:
+            body["key_name"] = self.key_name
+        if self.key_vault_uri is not None:
+            body["key_vault_uri"] = self.key_vault_uri
+        if self.tenant_id is not None:
+            body["tenant_id"] = self.tenant_id
+        if self.version is not None:
+            body["version"] = self.version
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AzureKeyInfo:
+        """Deserializes the AzureKeyInfo from a dictionary."""
+        return cls(
+            disk_encryption_set_id=d.get("disk_encryption_set_id", None),
+            key_access_configuration=_from_dict(d, "key_access_configuration", KeyAccessConfiguration),
+            key_name=d.get("key_name", None),
+            key_vault_uri=d.get("key_vault_uri", None),
+            tenant_id=d.get("tenant_id", None),
+            version=d.get("version", None),
+        )
+
+
 @dataclass
 class AzureWorkspaceInfo:
     resource_group: Optional[str] = None
@@ -130,8 +351,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureWorkspaceInfo:
 
 @dataclass
 class CloudResourceContainer:
-    """The general workspace configurations that are specific to cloud providers."""
-
     gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None
 
     def as_dict(self) -> dict:
@@ -157,16 +376,17 @@ def from_dict(cls, d: Dict[str, Any]) -> CloudResourceContainer:
 @dataclass
 class CreateAwsKeyInfo:
     key_arn: str
-    """The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from
-    the ARN."""
+    """The ARN of the KMS key."""
 
     key_alias: Optional[str] = None
-    """The AWS KMS key alias."""
+    """The alias name of the KMS key."""
+
+    key_region: Optional[str] = None
+    """The region of the KMS key."""
 
     reuse_key_for_cluster_volumes: Optional[bool] = None
-    """This field applies only if the `use_cases` property includes `STORAGE`.
If this is set to `true` - or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key also for - encrypting EBS volumes, set this to `false`.""" + """Indicates if the key should be used for cluster volumes. Can only be set if the CMK can be used + as a data plane key (use case storage)""" def as_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a dictionary suitable for use as a JSON request body.""" @@ -175,6 +395,8 @@ def as_dict(self) -> dict: body["key_alias"] = self.key_alias if self.key_arn is not None: body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region if self.reuse_key_for_cluster_volumes is not None: body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @@ -186,6 +408,8 @@ def as_shallow_dict(self) -> dict: body["key_alias"] = self.key_alias if self.key_arn is not None: body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region if self.reuse_key_for_cluster_volumes is not None: body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @@ -196,6 +420,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAwsKeyInfo: return cls( key_alias=d.get("key_alias", None), key_arn=d.get("key_arn", None), + key_region=d.get("key_region", None), reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), ) @@ -226,12 +451,27 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: @dataclass class CreateCredentialStsRole: + """* Use Amazon's STS service to assume a specified IAM role. The `longLivedProvider` is required + to grant permission to assume `roleArn`. As an example, consider the vault creating the vpc in + the customer account. The customer may provide her credentials as a role that we can assume. To + create the VPC, the vault will use the "sts:AssumeRole" permission in its IAM role to assume the + customer role. In this case, the vault's role is the long lived provider. @param roleArn The + role to assume @param externalId An identifier that enables cross account role assumption @param + longLivedProvider The credentials with which to assume the role""" + + external_id: Optional[str] = None + """Note: This must match the external_id on the parent object. + + TODO(j): Add validation to ensure this cannot be updated. 
If the user can override the + external_id, that defeats the purpose.""" + role_arn: Optional[str] = None - """The Amazon Resource Name (ARN) of the cross account role.""" def as_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a dictionary suitable for use as a JSON request body.""" body = {} + if self.external_id is not None: + body["external_id"] = self.external_id if self.role_arn is not None: body["role_arn"] = self.role_arn return body @@ -239,6 +479,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes.""" body = {} + if self.external_id is not None: + body["external_id"] = self.external_id if self.role_arn is not None: body["role_arn"] = self.role_arn return body @@ -246,17 +488,24 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: """Deserializes the CreateCredentialStsRole from a dictionary.""" - return cls(role_arn=d.get("role_arn", None)) + return cls(external_id=d.get("external_id", None), role_arn=d.get("role_arn", None)) @dataclass class CreateGcpKeyInfo: kms_key_id: str - """The GCP KMS key's resource name""" + """Globally unique kms key resource id of the form + projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4""" + + gcp_service_account: Optional[GcpServiceAccount] = None + """Globally unique service account email that has access to the KMS key. The service account exists + within the Databricks CP project.""" def as_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.gcp_service_account: + body["gcp_service_account"] = self.gcp_service_account.as_dict() if self.kms_key_id is not None: body["kms_key_id"] = self.kms_key_id return body @@ -264,6 +513,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.gcp_service_account: + body["gcp_service_account"] = self.gcp_service_account if self.kms_key_id is not None: body["kms_key_id"] = self.kms_key_id return body @@ -271,7 +522,10 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: """Deserializes the CreateGcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get("kms_key_id", None)) + return cls( + gcp_service_account=_from_dict(d, "gcp_service_account", GcpServiceAccount), + kms_key_id=d.get("kms_key_id", None), + ) @dataclass @@ -332,13 +586,18 @@ def from_dict(cls, d: Dict[str, Any]) -> Credential: ) +class CustomerFacingComputeMode(Enum): + """Corresponds to compute mode defined here: + https://src.dev.databricks.com/databricks/universe@9076536b18479afd639d1c1f9dd5a59f72215e69/-/blob/central/api/common.proto?L872 + """ + + HYBRID = "HYBRID" + SERVERLESS = "SERVERLESS" + + @dataclass class CustomerFacingGcpCloudResourceContainer: - """The general workspace configurations that are specific to Google Cloud.""" - project_id: Optional[str] = None - """The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your - workspace.""" def as_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a dictionary suitable for use as a JSON request body.""" @@ -360,6 +619,12 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerFacingGcpCloudResourceContainer return 
cls(project_id=d.get("project_id", None)) +class CustomerFacingStorageMode(Enum): + + CUSTOMER_HOSTED = "CUSTOMER_HOSTED" + DEFAULT_STORAGE = "DEFAULT_STORAGE" + + @dataclass class CustomerManagedKey: account_id: Optional[str] = None @@ -367,6 +632,8 @@ class CustomerManagedKey: aws_key_info: Optional[AwsKeyInfo] = None + azure_key_info: Optional[AzureKeyInfo] = None + creation_time: Optional[int] = None """Time in epoch milliseconds when the customer key was created.""" @@ -385,6 +652,8 @@ def as_dict(self) -> dict: body["account_id"] = self.account_id if self.aws_key_info: body["aws_key_info"] = self.aws_key_info.as_dict() + if self.azure_key_info: + body["azure_key_info"] = self.azure_key_info.as_dict() if self.creation_time is not None: body["creation_time"] = self.creation_time if self.customer_managed_key_id is not None: @@ -402,6 +671,8 @@ def as_shallow_dict(self) -> dict: body["account_id"] = self.account_id if self.aws_key_info: body["aws_key_info"] = self.aws_key_info + if self.azure_key_info: + body["azure_key_info"] = self.azure_key_info if self.creation_time is not None: body["creation_time"] = self.creation_time if self.customer_managed_key_id is not None: @@ -418,6 +689,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: return cls( account_id=d.get("account_id", None), aws_key_info=_from_dict(d, "aws_key_info", AwsKeyInfo), + azure_key_info=_from_dict(d, "azure_key_info", AzureKeyInfo), creation_time=d.get("creation_time", None), customer_managed_key_id=d.get("customer_managed_key_id", None), gcp_key_info=_from_dict(d, "gcp_key_info", GcpKeyInfo), @@ -425,37 +697,15 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: ) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - class EndpointUseCase(Enum): - """This enumeration represents the type of Databricks VPC [endpoint service] that was used when - creating this VPC endpoint. - - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" DATAPLANE_RELAY_ACCESS = "DATAPLANE_RELAY_ACCESS" WORKSPACE_ACCESS = "WORKSPACE_ACCESS" class ErrorType(Enum): - """The AWS resource associated with this error: credentials, VPC, subnet, security group, or - network ACL.""" + """ErrorType and WarningType are used to represent the type of error or warning by NetworkHealth + and NetworkWarning defined in central/api/accounts/accounts.proto""" CREDENTIALS = "credentials" NETWORK_ACL = "networkAcl" @@ -475,6 +725,17 @@ class ExternalCustomerInfo: customer_name: Optional[str] = None """The legal entity name for the external workspace""" + opt_out_external_customer_tos_workflow: Optional[bool] = None + + tos_accepted_by_email: Optional[str] = None + """The email of the authoritative user that signed the Terms of service.""" + + tos_accepted_by_full_name: Optional[str] = None + """The full name of the authoritative user that signed the Terms of service.""" + + tos_accepted_timestamp: Optional[int] = None + """Indicates when the Terms of service was signed. 
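# Editorial example (not part of the patch): a customer-managed key can now carry
# Azure key details alongside the AWS and GCP variants; the values are hypothetical.
from databricks.sdk.service.provisioning import AzureKeyInfo, CustomerManagedKey

cmk = CustomerManagedKey.from_dict(
    {"azure_key_info": {"key_name": "cmk-key", "key_vault_uri": "https://example.vault.azure.net"}}
)
assert isinstance(cmk.azure_key_info, AzureKeyInfo)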
None if it has not been signed.""" + def as_dict(self) -> dict: """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body.""" body = {} @@ -484,6 +745,14 @@ def as_dict(self) -> dict: body["authoritative_user_full_name"] = self.authoritative_user_full_name if self.customer_name is not None: body["customer_name"] = self.customer_name + if self.opt_out_external_customer_tos_workflow is not None: + body["opt_out_external_customer_tos_workflow"] = self.opt_out_external_customer_tos_workflow + if self.tos_accepted_by_email is not None: + body["tos_accepted_by_email"] = self.tos_accepted_by_email + if self.tos_accepted_by_full_name is not None: + body["tos_accepted_by_full_name"] = self.tos_accepted_by_full_name + if self.tos_accepted_timestamp is not None: + body["tos_accepted_timestamp"] = self.tos_accepted_timestamp return body def as_shallow_dict(self) -> dict: @@ -495,6 +764,14 @@ def as_shallow_dict(self) -> dict: body["authoritative_user_full_name"] = self.authoritative_user_full_name if self.customer_name is not None: body["customer_name"] = self.customer_name + if self.opt_out_external_customer_tos_workflow is not None: + body["opt_out_external_customer_tos_workflow"] = self.opt_out_external_customer_tos_workflow + if self.tos_accepted_by_email is not None: + body["tos_accepted_by_email"] = self.tos_accepted_by_email + if self.tos_accepted_by_full_name is not None: + body["tos_accepted_by_full_name"] = self.tos_accepted_by_full_name + if self.tos_accepted_timestamp is not None: + body["tos_accepted_timestamp"] = self.tos_accepted_timestamp return body @classmethod @@ -504,17 +781,67 @@ def from_dict(cls, d: Dict[str, Any]) -> ExternalCustomerInfo: authoritative_user_email=d.get("authoritative_user_email", None), authoritative_user_full_name=d.get("authoritative_user_full_name", None), customer_name=d.get("customer_name", None), + opt_out_external_customer_tos_workflow=d.get("opt_out_external_customer_tos_workflow", None), + tos_accepted_by_email=d.get("tos_accepted_by_email", None), + tos_accepted_by_full_name=d.get("tos_accepted_by_full_name", None), + tos_accepted_timestamp=d.get("tos_accepted_timestamp", None), + ) + + +@dataclass +class GcpCommonNetworkConfig: + """The shared network config for GCP workspace. This object has common network configurations that + are network attributions of a workspace. DEPRECATED. Use GkeConfig instead.""" + + gke_cluster_master_ip_range: Optional[str] = None + """The IP range that will be used to allocate GKE cluster master resources from. 
This field must + not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER.""" + + gke_connectivity_type: Optional[GkeConfigConnectivityType] = None + """The type of network connectivity of the GKE cluster.""" + + def as_dict(self) -> dict: + """Serializes the GcpCommonNetworkConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.gke_cluster_master_ip_range is not None: + body["gke_cluster_master_ip_range"] = self.gke_cluster_master_ip_range + if self.gke_connectivity_type is not None: + body["gke_connectivity_type"] = self.gke_connectivity_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GcpCommonNetworkConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.gke_cluster_master_ip_range is not None: + body["gke_cluster_master_ip_range"] = self.gke_cluster_master_ip_range + if self.gke_connectivity_type is not None: + body["gke_connectivity_type"] = self.gke_connectivity_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GcpCommonNetworkConfig: + """Deserializes the GcpCommonNetworkConfig from a dictionary.""" + return cls( + gke_cluster_master_ip_range=d.get("gke_cluster_master_ip_range", None), + gke_connectivity_type=_enum(d, "gke_connectivity_type", GkeConfigConnectivityType), ) @dataclass class GcpKeyInfo: kms_key_id: str - """The GCP KMS key's resource name""" + """Globally unique kms key resource id of the form + projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4""" + + gcp_service_account: Optional[GcpServiceAccount] = None + """Globally unique service account email that has access to the KMS key. The service account exists + within the Databricks CP project.""" def as_dict(self) -> dict: """Serializes the GcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.gcp_service_account: + body["gcp_service_account"] = self.gcp_service_account.as_dict() if self.kms_key_id is not None: body["kms_key_id"] = self.kms_key_id return body @@ -522,6 +849,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.gcp_service_account: + body["gcp_service_account"] = self.gcp_service_account if self.kms_key_id is not None: body["kms_key_id"] = self.kms_key_id return body @@ -529,42 +858,25 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpKeyInfo: """Deserializes the GcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get("kms_key_id", None)) + return cls( + gcp_service_account=_from_dict(d, "gcp_service_account", GcpServiceAccount), + kms_key_id=d.get("kms_key_id", None), + ) @dataclass class GcpManagedNetworkConfig: - """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP - range configurations must be mutually exclusive. An attempt to create a workspace fails if - Databricks detects an IP range overlap. - - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and - all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. 
- - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - """ + """The network configuration for the workspace.""" gke_cluster_pod_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than - `/21`.""" + """The IP range that will be used to allocate GKE cluster Pods from.""" gke_cluster_service_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller - than `/27`.""" + """The IP range that will be used to allocate GKE cluster Services from.""" subnet_cidr: Optional[str] = None - """The IP range from which to allocate GKE cluster nodes. No bigger than `/9` and no smaller than - `/29`.""" + """The IP range which will be used to allocate GKE cluster nodes from. Note: Pods, services and + master IP range must be mutually exclusive.""" def as_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a dictionary suitable for use as a JSON request body.""" @@ -600,29 +912,24 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpManagedNetworkConfig: @dataclass class GcpNetworkInfo: - """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges).""" - network_project_id: str - """The Google Cloud project ID of the VPC network.""" + """The GCP project ID for network resources. This project is where the VPC and subnet resides.""" vpc_id: str - """The ID of the VPC associated with this network. VPC IDs can be used in multiple network - configurations.""" + """The customer-provided VPC ID.""" subnet_id: str - """The ID of the subnet associated with this network.""" + """The customer-provided Subnet ID that will be available to Clusters in Workspaces using this + Network.""" subnet_region: str - """The Google Cloud region of the workspace data plane (for example, `us-east4`).""" pod_ip_range_name: str - """The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP range - for its pods. This secondary IP range can be used by only one workspace.""" + """Name of the secondary range within the subnet that will be used by GKE as Pod IP range. This is + BYO VPC specific. DB VPC uses network.getGcpManagedNetworkConfig.getGkeClusterPodIpRange""" service_ip_range_name: str - """The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP - range for its services. 
This secondary IP range can be used by only one workspace.""" + """Name of the secondary range within the subnet that will be used by GKE as Service IP range.""" def as_dict(self) -> dict: """Serializes the GcpNetworkInfo into a dictionary suitable for use as a JSON request body.""" @@ -672,23 +979,40 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpNetworkInfo: @dataclass -class GcpVpcEndpointInfo: - """The Google Cloud specific information for this Private Service Connect endpoint.""" +class GcpServiceAccount: + service_account_email: Optional[str] = None + def as_dict(self) -> dict: + """Serializes the GcpServiceAccount into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.service_account_email is not None: + body["service_account_email"] = self.service_account_email + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GcpServiceAccount into a shallow dictionary of its immediate attributes.""" + body = {} + if self.service_account_email is not None: + body["service_account_email"] = self.service_account_email + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GcpServiceAccount: + """Deserializes the GcpServiceAccount from a dictionary.""" + return cls(service_account_email=d.get("service_account_email", None)) + + +@dataclass +class GcpVpcEndpointInfo: project_id: str - """The Google Cloud project ID of the VPC network where the PSC connection resides.""" psc_endpoint_name: str - """The name of the PSC endpoint in the Google Cloud project.""" endpoint_region: str - """Region of the PSC endpoint.""" psc_connection_id: Optional[str] = None - """The unique ID of this PSC connection.""" service_attachment_id: Optional[str] = None - """The service attachment this PSC connection connects to.""" def as_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a dictionary suitable for use as a JSON request body.""" @@ -734,22 +1058,14 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpVpcEndpointInfo: @dataclass class GkeConfig: - """The configurations for the GKE cluster of a Databricks workspace.""" + """The configurations of the GKE cluster used by the GCP workspace.""" connectivity_type: Optional[GkeConfigConnectivityType] = None - """Specifies the network connectivity types for the GKE nodes and the GKE master network. - - Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes - will not have public IPs. - - Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster - have public IP addresses.""" + """The type of network connectivity of the GKE cluster.""" master_ip_range: Optional[str] = None - """The IP range from which to allocate GKE cluster master resources. This field will be ignored if - GKE private cluster is not enabled. - - It must be exactly as big as `/28`.""" + """The IP range that will be used to allocate GKE cluster master resources from. 
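# Editorial example (not part of the patch): GCP CMK info now records the service
# account that can reach the key. The key ID follows the format in the docstring
# above; the service account email is hypothetical.
from databricks.sdk.service.provisioning import CreateGcpKeyInfo, GcpServiceAccount

key = CreateGcpKeyInfo(
    kms_key_id="projects/testProjectId/locations/us-east4/keyRings/gcpCmkKeyRing/cryptoKeys/cmk-eastus4",
    gcp_service_account=GcpServiceAccount(service_account_email="cmk-access@example.iam.gserviceaccount.com"),
)
assert key.as_dict()["gcp_service_account"]["service_account_email"]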
This field must + not be set if gke_cluster_type=PUBLIC_NODE_PUBLIC_MASTER.""" def as_dict(self) -> dict: """Serializes the GkeConfig into a dictionary suitable for use as a JSON request body.""" @@ -791,10 +1107,33 @@ class GkeConfigConnectivityType(Enum): PUBLIC_NODE_PUBLIC_MASTER = "PUBLIC_NODE_PUBLIC_MASTER" +@dataclass +class KeyAccessConfiguration: + """The credential ID that is used to access the key vault.""" + + credential_id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the KeyAccessConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the KeyAccessConfiguration into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> KeyAccessConfiguration: + """Deserializes the KeyAccessConfiguration from a dictionary.""" + return cls(credential_id=d.get("credential_id", None)) + + class KeyUseCase(Enum): - """Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control - plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, - optionally, cluster EBS volumes.""" MANAGED_SERVICES = "MANAGED_SERVICES" STORAGE = "STORAGE" @@ -805,6 +1144,8 @@ class Network: account_id: Optional[str] = None """The Databricks account ID associated with this network configuration.""" + aws_network_info: Optional[AwsNetworkInfo] = None + creation_time: Optional[int] = None """Time in epoch milliseconds when the network was created.""" @@ -820,8 +1161,12 @@ class Network: """The human-readable name of the network configuration.""" security_group_ids: Optional[List[str]] = None + """IDs of one to five security groups associated with this network. Security group IDs **cannot** + be used in multiple network configurations.""" subnet_ids: Optional[List[str]] = None + """IDs of at least two subnets associated with this network. 
Subnet IDs **cannot** be used in + multiple network configurations.""" vpc_endpoints: Optional[NetworkVpcEndpoints] = None @@ -842,6 +1187,8 @@ def as_dict(self) -> dict: body = {} if self.account_id is not None: body["account_id"] = self.account_id + if self.aws_network_info: + body["aws_network_info"] = self.aws_network_info.as_dict() if self.creation_time is not None: body["creation_time"] = self.creation_time if self.error_messages: @@ -873,6 +1220,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.account_id is not None: body["account_id"] = self.account_id + if self.aws_network_info: + body["aws_network_info"] = self.aws_network_info if self.creation_time is not None: body["creation_time"] = self.creation_time if self.error_messages: @@ -904,6 +1253,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Network: """Deserializes the Network from a dictionary.""" return cls( account_id=d.get("account_id", None), + aws_network_info=_from_dict(d, "aws_network_info", AwsNetworkInfo), creation_time=d.get("creation_time", None), error_messages=_repeated_dict(d, "error_messages", NetworkHealth), gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), @@ -952,18 +1302,13 @@ def from_dict(cls, d: Dict[str, Any]) -> NetworkHealth: @dataclass class NetworkVpcEndpoints: - """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - [AWS PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - - rest_api: List[str] - """The VPC endpoint ID used by this network to access the Databricks REST API.""" - - dataplane_relay: List[str] + dataplane_relay: Optional[List[str]] = None """The VPC endpoint ID used by this network to access the Databricks secure cluster connectivity relay.""" + rest_api: Optional[List[str]] = None + """The VPC endpoint ID used by this network to access the Databricks REST API.""" + def as_dict(self) -> dict: """Serializes the NetworkVpcEndpoints into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1019,10 +1364,78 @@ def from_dict(cls, d: Dict[str, Any]) -> NetworkWarning: return cls(warning_message=d.get("warning_message", None), warning_type=_enum(d, "warning_type", WarningType)) -class PricingTier(Enum): - """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. +@dataclass +class NpipVpcInfra: + """Describes AWS resources allocations for NPIP shard environments. Used to track and delete + resources during worker (and shard) environment deletion. Should only be used for MT NPIP shard + environments currently.""" + + nat_eip_allocation_id: Optional[str] = None + """Elastic IP allocation id. Example: eipalloc-0df89abd3b5a548af""" + + nat_gateway_id: Optional[str] = None + """NAT gateway id. Example: nat-0ae5b2f027fe7221a""" + + nat_route_table_association_id: Optional[str] = None + """Route table association id. Example: rtbassoc-089a9a9037542a912""" + + nat_route_table_id: Optional[str] = None + """Route table id. Example: rtb-06118dc3003ee809b""" + + nat_subnet_id: Optional[str] = None + """Subnet id. Example: subnet-0f6f001e243e00c10""" + + nat_vpc_endpoint_id: Optional[str] = None + """VPC endpoint id. 
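# Editorial example (not part of the patch): NpipVpcInfra serializes with camelCase
# keys, and both NetworkVpcEndpoints lists are now optional, so an empty object is
# expected to serialize to {}. The gateway ID reuses the docstring's example.
from databricks.sdk.service.provisioning import NetworkVpcEndpoints, NpipVpcInfra

infra = NpipVpcInfra(nat_gateway_id="nat-0ae5b2f027fe7221a")
assert infra.as_dict() == {"natGatewayId": "nat-0ae5b2f027fe7221a"}
assert NetworkVpcEndpoints().as_dict() == {}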
Example: vpce-08f210093b4e5ecb5""" - [AWS Pricing]: https://databricks.com/product/aws-pricing""" + def as_dict(self) -> dict: + """Serializes the NpipVpcInfra into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.nat_eip_allocation_id is not None: + body["natEipAllocationId"] = self.nat_eip_allocation_id + if self.nat_gateway_id is not None: + body["natGatewayId"] = self.nat_gateway_id + if self.nat_route_table_association_id is not None: + body["natRouteTableAssociationId"] = self.nat_route_table_association_id + if self.nat_route_table_id is not None: + body["natRouteTableId"] = self.nat_route_table_id + if self.nat_subnet_id is not None: + body["natSubnetId"] = self.nat_subnet_id + if self.nat_vpc_endpoint_id is not None: + body["natVpcEndpointId"] = self.nat_vpc_endpoint_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NpipVpcInfra into a shallow dictionary of its immediate attributes.""" + body = {} + if self.nat_eip_allocation_id is not None: + body["natEipAllocationId"] = self.nat_eip_allocation_id + if self.nat_gateway_id is not None: + body["natGatewayId"] = self.nat_gateway_id + if self.nat_route_table_association_id is not None: + body["natRouteTableAssociationId"] = self.nat_route_table_association_id + if self.nat_route_table_id is not None: + body["natRouteTableId"] = self.nat_route_table_id + if self.nat_subnet_id is not None: + body["natSubnetId"] = self.nat_subnet_id + if self.nat_vpc_endpoint_id is not None: + body["natVpcEndpointId"] = self.nat_vpc_endpoint_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NpipVpcInfra: + """Deserializes the NpipVpcInfra from a dictionary.""" + return cls( + nat_eip_allocation_id=d.get("natEipAllocationId", None), + nat_gateway_id=d.get("natGatewayId", None), + nat_route_table_association_id=d.get("natRouteTableAssociationId", None), + nat_route_table_id=d.get("natRouteTableId", None), + nat_subnet_id=d.get("natSubnetId", None), + nat_vpc_endpoint_id=d.get("natVpcEndpointId", None), + ) + + +class PricingTier(Enum): COMMUNITY_EDITION = "COMMUNITY_EDITION" DEDICATED = "DEDICATED" @@ -1033,40 +1446,40 @@ class PricingTier(Enum): class PrivateAccessLevel(Enum): - """The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. * `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. 
For details, see `allowed_vpc_endpoint_ids`.""" ACCOUNT = "ACCOUNT" + ANY = "ANY" ENDPOINT = "ENDPOINT" + UNKNOWN_ACCESS_LEVEL = "UNKNOWN_ACCESS_LEVEL" @dataclass class PrivateAccessSettings: + """*""" + account_id: Optional[str] = None - """The Databricks account ID that hosts the credential.""" + """The MWS Account in which the Private Access Settings exists.""" allowed_vpc_endpoint_ids: Optional[List[str]] = None - """An array of Databricks VPC endpoint IDs.""" + """The MWS API ID of VPC Endpoints that can access this workspace - only filled if + privateAccessLevel is ENDPOINT""" private_access_level: Optional[PrivateAccessLevel] = None + """The level of isolation of a workspace attached to this settings object""" private_access_settings_id: Optional[str] = None - """Databricks private access settings ID.""" + """The ID in the MWS API of the Private Access Settings.""" private_access_settings_name: Optional[str] = None - """The human-readable name of the private access settings object.""" + """The friendly user-facing name of the Private Access Settings (i.e. jake's private access + settings)""" public_access_enabled: Optional[bool] = None - """Determines if the workspace can be accessed over public internet. For fully private workspaces, - you can optionally specify `false`, but only if you implement both the front-end and the - back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is - enabled.""" + """Whether or not public traffic can enter this workspace. True for hybrid workspaces, false + otherwise.""" region: Optional[str] = None - """The cloud region for workspaces attached to this private access settings object.""" + """The region in which this private access settings is valid""" def as_dict(self) -> dict: """Serializes the PrivateAccessSettings into a dictionary suitable for use as a JSON request body.""" @@ -1120,30 +1533,10 @@ def from_dict(cls, d: Dict[str, Any]) -> PrivateAccessSettings: ) -@dataclass -class ReplaceResponse: - def as_dict(self) -> dict: - """Serializes the ReplaceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: - """Deserializes the ReplaceResponse from a dictionary.""" - return cls() - - @dataclass class RootBucketInfo: - """Root S3 bucket information.""" - bucket_name: Optional[str] = None - """The name of the S3 bucket.""" + """Name of the bucket""" def as_dict(self) -> dict: """Serializes the RootBucketInfo into a dictionary suitable for use as a JSON request body.""" @@ -1168,18 +1561,21 @@ def from_dict(cls, d: Dict[str, Any]) -> RootBucketInfo: @dataclass class StorageConfiguration: account_id: Optional[str] = None - """The Databricks account ID that hosts the credential.""" creation_time: Optional[int] = None - """Time in epoch milliseconds when the storage configuration was created.""" + + role_arn: Optional[str] = None + """The IAM role that is used to access the workspace catalog which is created during workspace + creation for UC by Default. If a storage configuration that has this field populated is used to + create a workspace, then a workspace catalog is created together with the workspace. 
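# Editorial example (not part of the patch): a role_arn on a storage configuration
# signals the UC-by-default workspace catalog described above; the ARN is
# hypothetical.
from databricks.sdk.service.provisioning import StorageConfiguration

sc = StorageConfiguration.from_dict({"role_arn": "arn:aws:iam::123456789012:role/uc-default-access"})
assert sc.role_arn is not None and sc.role_arn.startswith("arn:aws:iam::")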
The + workspace catalog shares the root bucket with internal workspace storage (including DBFS root) + but uses a dedicated bucket path prefix.""" root_bucket_info: Optional[RootBucketInfo] = None storage_configuration_id: Optional[str] = None - """Databricks storage configuration ID.""" storage_configuration_name: Optional[str] = None - """The human-readable name of the storage configuration.""" def as_dict(self) -> dict: """Serializes the StorageConfiguration into a dictionary suitable for use as a JSON request body.""" @@ -1188,6 +1584,8 @@ def as_dict(self) -> dict: body["account_id"] = self.account_id if self.creation_time is not None: body["creation_time"] = self.creation_time + if self.role_arn is not None: + body["role_arn"] = self.role_arn if self.root_bucket_info: body["root_bucket_info"] = self.root_bucket_info.as_dict() if self.storage_configuration_id is not None: @@ -1203,6 +1601,8 @@ def as_shallow_dict(self) -> dict: body["account_id"] = self.account_id if self.creation_time is not None: body["creation_time"] = self.creation_time + if self.role_arn is not None: + body["role_arn"] = self.role_arn if self.root_bucket_info: body["root_bucket_info"] = self.root_bucket_info if self.storage_configuration_id is not None: @@ -1217,6 +1617,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration: return cls( account_id=d.get("account_id", None), creation_time=d.get("creation_time", None), + role_arn=d.get("role_arn", None), root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), storage_configuration_id=d.get("storage_configuration_id", None), storage_configuration_name=d.get("storage_configuration_name", None), @@ -1225,12 +1626,21 @@ def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration: @dataclass class StsRole: + """* Use Amazon's STS service to assume a specified IAM role. The `longLivedProvider` is required + to grant permission to assume `roleArn`. As an example, consider the vault creating the vpc in + the customer account. The customer may provide her credentials as a role that we can assume. To + create the VPC, the vault will use the "sts:AssumeRole" permission in its IAM role to assume the + customer role. In this case, the vault's role is the long lived provider. @param roleArn The + role to assume @param externalId An identifier that enables cross account role assumption @param + longLivedProvider The credentials with which to assume the role""" + external_id: Optional[str] = None - """The external ID that needs to be trusted by the cross-account role. This is always your - Databricks account ID.""" + """Note: This must match the external_id on the parent object. + + TODO(j): Add validation to ensure this cannot be updated. 
If the user can override the + external_id, that defeats the purpose.""" role_arn: Optional[str] = None - """The Amazon Resource Name (ARN) of the cross account role.""" def as_dict(self) -> dict: """Serializes the StsRole into a dictionary suitable for use as a JSON request body.""" @@ -1257,27 +1667,57 @@ def from_dict(cls, d: Dict[str, Any]) -> StsRole: @dataclass -class UpdateResponse: +class SubnetInfo: + """Describes a single subnet, which is associated with a particular AWS AZ and a particular address + space which is a subset of the overall vpc_address_space.""" + + availability_zone: Optional[str] = None + """Example: us-west-2a""" + + subnet_address_space: Optional[str] = None + """Example: 10.0.0.0/17.""" + + subnet_id: Optional[str] = None + def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the SubnetInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.availability_zone is not None: + body["availability_zone"] = self.availability_zone + if self.subnet_address_space is not None: + body["subnet_address_space"] = self.subnet_address_space + if self.subnet_id is not None: + body["subnet_id"] = self.subnet_id return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the SubnetInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.availability_zone is not None: + body["availability_zone"] = self.availability_zone + if self.subnet_address_space is not None: + body["subnet_address_space"] = self.subnet_address_space + if self.subnet_id is not None: + body["subnet_id"] = self.subnet_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> SubnetInfo: + """Deserializes the SubnetInfo from a dictionary.""" + return cls( + availability_zone=d.get("availability_zone", None), + subnet_address_space=d.get("subnet_address_space", None), + subnet_id=d.get("subnet_id", None), + ) @dataclass class VpcEndpoint: + """*""" + account_id: Optional[str] = None - """The Databricks account ID that hosts the VPC endpoint configuration.""" + """The Databricks account ID that hosts the VPC endpoint configuration. TODO - This may signal an + OpenAPI diff; it does not show up in the generated spec""" aws_account_id: Optional[str] = None """The AWS Account in which the VPC endpoint object exists.""" @@ -1294,6 +1734,7 @@ class VpcEndpoint: """The ID of the VPC endpoint object in AWS.""" gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None + """The cloud info of this vpc endpoint. Info for a GCP vpc endpoint.""" region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" @@ -1381,8 +1822,6 @@ def from_dict(cls, d: Dict[str, Any]) -> VpcEndpoint: class VpcStatus(Enum): - """The status of this network configuration object in terms of its use in a workspace: * - `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. 
* `WARNED`: Warned."""

     BROKEN = "BROKEN"
     UNATTACHED = "UNATTACHED"
@@ -1391,7 +1830,6 @@ class VpcStatus(Enum):


 class WarningType(Enum):
-    """The AWS resource associated with this warning: a subnet or a security group."""

     SECURITY_GROUP = "securityGroup"
     SUBNET = "subnet"
@@ -1403,7 +1841,6 @@ class Workspace:
     """Databricks account ID."""

     aws_region: Optional[str] = None
-    """The AWS region of the workspace data plane (for example, `us-west-2`)."""

     azure_workspace_info: Optional[AzureWorkspaceInfo] = None

@@ -1412,6 +1849,9 @@ class Workspace:

     cloud_resource_container: Optional[CloudResourceContainer] = None

+    compute_mode: Optional[CustomerFacingComputeMode] = None
+    """The compute mode of the workspace."""
+
     creation_time: Optional[int] = None
     """Time in epoch milliseconds when the workspace was created."""

@@ -1419,26 +1859,18 @@ class Workspace:
     """ID of the workspace's credential configuration object."""

     custom_tags: Optional[Dict[str, str]] = None
-    """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a
-    string of utf-8 characters. The value can be an empty string, with maximum length of 255
-    characters. The key can be of maximum length of 127 characters, and cannot be empty."""

     deployment_name: Optional[str] = None
-    """The deployment name defines part of the subdomain for the workspace. The workspace URL for web
-    application and REST APIs is `<workspace-deployment-name>.cloud.databricks.com`.
-
-    This value must be unique across all non-deleted deployments across all AWS regions."""

     external_customer_info: Optional[ExternalCustomerInfo] = None
-    """If this workspace is for a external customer, then external_customer_info is populated. If this
-    workspace is not for a external customer, then external_customer_info is empty."""
+    """Maps to external_customer_info from the workspace proto; this contains fields for external
+    customers."""

     gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None

     gke_config: Optional[GkeConfig] = None

     is_no_public_ip_enabled: Optional[bool] = None
-    """Whether no public IP is enabled for the workspace."""
+    """Whether No Public IP is enabled for the workspace"""

     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account (for example,
@@ -1447,9 +1879,17 @@ class Workspace:
     managed_services_customer_managed_key_id: Optional[str] = None
     """ID of the key configuration for encrypting managed services."""

+    network: Optional[WorkspaceNetwork] = None
+    """The network configuration for the workspace.
+
+    DEPRECATED. Use `network_id` instead."""
+
+    network_connectivity_config_id: Optional[str] = None
+    """The object ID of network connectivity config."""
+
     network_id: Optional[str] = None
-    """The network configuration ID that is attached to the workspace. This field is available only if
-    the network is a customer-managed network."""
+    """If this workspace is BYO VPC, then the network_id will be populated.
If this workspace is not + BYO VPC, then the network_id will be empty.""" pricing_tier: Optional[PricingTier] = None @@ -1469,6 +1909,9 @@ class Workspace: storage_customer_managed_key_id: Optional[str] = None """ID of the key configuration for encrypting workspace storage.""" + storage_mode: Optional[CustomerFacingStorageMode] = None + """The storage mode of the workspace.""" + workspace_id: Optional[int] = None """A unique integer ID for the workspace""" @@ -1476,6 +1919,7 @@ class Workspace: """The human-readable name of the workspace.""" workspace_status: Optional[WorkspaceStatus] = None + """The status of a workspace""" workspace_status_message: Optional[str] = None """Message describing the current workspace status.""" @@ -1493,6 +1937,8 @@ def as_dict(self) -> dict: body["cloud"] = self.cloud if self.cloud_resource_container: body["cloud_resource_container"] = self.cloud_resource_container.as_dict() + if self.compute_mode is not None: + body["compute_mode"] = self.compute_mode.value if self.creation_time is not None: body["creation_time"] = self.creation_time if self.credentials_id is not None: @@ -1513,6 +1959,10 @@ def as_dict(self) -> dict: body["location"] = self.location if self.managed_services_customer_managed_key_id is not None: body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network: + body["network"] = self.network.as_dict() + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id if self.network_id is not None: body["network_id"] = self.network_id if self.pricing_tier is not None: @@ -1523,6 +1973,8 @@ def as_dict(self) -> dict: body["storage_configuration_id"] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode.value if self.workspace_id is not None: body["workspace_id"] = self.workspace_id if self.workspace_name is not None: @@ -1546,6 +1998,8 @@ def as_shallow_dict(self) -> dict: body["cloud"] = self.cloud if self.cloud_resource_container: body["cloud_resource_container"] = self.cloud_resource_container + if self.compute_mode is not None: + body["compute_mode"] = self.compute_mode if self.creation_time is not None: body["creation_time"] = self.creation_time if self.credentials_id is not None: @@ -1566,6 +2020,10 @@ def as_shallow_dict(self) -> dict: body["location"] = self.location if self.managed_services_customer_managed_key_id is not None: body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network: + body["network"] = self.network + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id if self.network_id is not None: body["network_id"] = self.network_id if self.pricing_tier is not None: @@ -1576,6 +2034,8 @@ def as_shallow_dict(self) -> dict: body["storage_configuration_id"] = self.storage_configuration_id if self.storage_customer_managed_key_id is not None: body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode if self.workspace_id is not None: body["workspace_id"] = self.workspace_id if self.workspace_name is not None: @@ -1595,6 +2055,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Workspace: 
azure_workspace_info=_from_dict(d, "azure_workspace_info", AzureWorkspaceInfo),
             cloud=d.get("cloud", None),
             cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer),
+            compute_mode=_enum(d, "compute_mode", CustomerFacingComputeMode),
             creation_time=d.get("creation_time", None),
             credentials_id=d.get("credentials_id", None),
             custom_tags=d.get("custom_tags", None),
@@ -1605,11 +2066,14 @@ def from_dict(cls, d: Dict[str, Any]) -> Workspace:
             is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None),
             location=d.get("location", None),
             managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network=_from_dict(d, "network", WorkspaceNetwork),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
             network_id=d.get("network_id", None),
             pricing_tier=_enum(d, "pricing_tier", PricingTier),
             private_access_settings_id=d.get("private_access_settings_id", None),
             storage_configuration_id=d.get("storage_configuration_id", None),
             storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            storage_mode=_enum(d, "storage_mode", CustomerFacingStorageMode),
             workspace_id=d.get("workspace_id", None),
             workspace_name=d.get("workspace_name", None),
             workspace_status=_enum(d, "workspace_status", WorkspaceStatus),
@@ -1617,9 +2081,65 @@ def from_dict(cls, d: Dict[str, Any]) -> Workspace:
         )


+@dataclass
+class WorkspaceNetwork:
+    """The network configuration for workspaces."""
+
+    gcp_common_network_config: Optional[GcpCommonNetworkConfig] = None
+    """The shared network config for GCP workspace. This object has common network configurations that
+    are network attributes of a workspace. This object is input-only."""
+
+    gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
+    """The mutually exclusive network deployment modes. The option decides which network mode the
+    workspace will use. The network config for GCP workspace with Databricks managed network. This
+    object is input-only and will not be provided when listing workspaces. See
+    go/gcp-byovpc-alpha-design for interface decisions."""
+
+    network_id: Optional[str] = None
+    """The ID of the network object, if the workspace is a BYOVPC workspace. This should apply to
+    workspaces on all clouds in internal services. In accounts-rest-api, users will use
+    workspace.network_id for input and output instead. Currently (2021-06-19) the network ID is only
+    used by GCP."""
+
+    def as_dict(self) -> dict:
+        """Serializes the WorkspaceNetwork into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.gcp_common_network_config:
+            body["gcp_common_network_config"] = self.gcp_common_network_config.as_dict()
+        if self.gcp_managed_network_config:
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict()
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceNetwork into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp_common_network_config:
+            body["gcp_common_network_config"] = self.gcp_common_network_config
+        if self.gcp_managed_network_config:
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> WorkspaceNetwork:
+        """Deserializes the WorkspaceNetwork from a dictionary."""
+        return cls(
+            gcp_common_network_config=_from_dict(d, "gcp_common_network_config", GcpCommonNetworkConfig),
+            gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig),
+            network_id=d.get("network_id", None),
+        )
+
+
 class WorkspaceStatus(Enum):
-    """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
-    initially. Continue to check the status until the status is `RUNNING`."""
+    """The different statuses of a workspace. The following represents the current set of valid
+    transitions from status to status:
+
+    * NOT_PROVISIONED -> PROVISIONING -> CANCELLED
+    * PROVISIONING -> RUNNING -> FAILED -> CANCELLED (note that this transition is disallowed in the
+      MultiWorkspace Project)
+    * RUNNING -> PROVISIONING -> BANNED -> CANCELLED
+    * FAILED -> PROVISIONING -> CANCELLED
+    * BANNED -> RUNNING -> CANCELLED
+
+    Note that a transition from any state to itself is also valid.
+
+    TODO(PLAT-5867): add a transition from CANCELLED to some other value (e.g. RECOVERING)"""

     BANNED = "BANNED"
     CANCELLING = "CANCELLING"
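The transition notes above are what the SDK's waiters lean on: a new workspace normally moves PROVISIONING -> RUNNING, and failure states do not clear on their own. The generated `wait_get_workspace_running` helper already encodes this; the sketch below only makes the loop explicit, assuming an `AccountClient` bound to `a` and assuming `RUNNING` and `FAILED` are members of the enum (only `BANNED` and `CANCELLING` are visible in this hunk):

import time

def wait_until_running(a, workspace_id: int, poll_seconds: int = 30) -> Workspace:
    # Poll GET /workspaces/{id} until RUNNING; treat FAILED and BANNED as
    # terminal, since per the transition graph above they only leave via
    # re-provisioning or manual intervention.
    while True:
        ws = a.workspaces.get(workspace_id)
        if ws.workspace_status == WorkspaceStatus.RUNNING:
            return ws
        if ws.workspace_status in (WorkspaceStatus.FAILED, WorkspaceStatus.BANNED):
            raise RuntimeError(f"workspace {workspace_id}: {ws.workspace_status_message}")
        time.sleep(poll_seconds)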
@@ -1638,7 +2158,12 @@ class CredentialsAPI:
     def __init__(self, api_client):
         self._api = api_client

-    def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential:
+    def create(
+        self,
+        *,
+        aws_credentials: Optional[CreateCredentialAwsCredentials] = None,
+        credentials_name: Optional[str] = None,
+    ) -> Credential:
         """Creates a Databricks credential configuration that represents cloud cross-account credentials for
         a specified account. Databricks uses this to set up network infrastructure properly to host
         Databricks clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
@@ -1652,9 +2177,9 @@ def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCred
         [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html

-        :param credentials_name: str
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials` (optional)
+        :param credentials_name: str (optional)
           The human-readable name of the credential configuration object.
- :param aws_credentials: :class:`CreateCredentialAwsCredentials` :returns: :class:`Credential` """ @@ -1671,29 +2196,30 @@ def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCred res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/credentials", body=body, headers=headers) return Credential.from_dict(res) - def delete(self, credentials_id: str): + def delete(self, credentials_id: str) -> Credential: """Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. :param credentials_id: str Databricks Account API credential configuration ID - + :returns: :class:`Credential` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers ) + return Credential.from_dict(res) def get(self, credentials_id: str) -> Credential: """Gets a Databricks credential configuration object for an account, both specified by ID. :param credentials_id: str - Databricks Account API credential configuration ID + Credential configuration ID :returns: :class:`Credential` """ @@ -1708,7 +2234,7 @@ def get(self, credentials_id: str) -> Credential: return Credential.from_dict(res) def list(self) -> Iterator[Credential]: - """Gets all Databricks credential configurations associated with an account specified by ID. + """List Databricks credential configuration objects for an account, specified by ID. :returns: Iterator over :class:`Credential` @@ -1742,10 +2268,10 @@ def __init__(self, api_client): def create( self, - use_cases: List[KeyUseCase], *, aws_key_info: Optional[CreateAwsKeyInfo] = None, gcp_key_info: Optional[CreateGcpKeyInfo] = None, + use_cases: Optional[List[KeyUseCase]] = None, ) -> CustomerManagedKey: """Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's @@ -1761,10 +2287,10 @@ def create( This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - :param use_cases: List[:class:`KeyUseCase`] - The cases that the key can be used for. :param aws_key_info: :class:`CreateAwsKeyInfo` (optional) :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional) + :param use_cases: List[:class:`KeyUseCase`] (optional) + The cases that the key can be used for. :returns: :class:`CustomerManagedKey` """ @@ -1785,25 +2311,26 @@ def create( ) return CustomerManagedKey.from_dict(res) - def delete(self, customer_managed_key_id: str): + def delete(self, customer_managed_key_id: str) -> CustomerManagedKey: """Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. :param customer_managed_key_id: str Databricks encryption key configuration ID. - + :returns: :class:`CustomerManagedKey` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", headers=headers, ) + return CustomerManagedKey.from_dict(res) def get(self, customer_managed_key_id: str) -> CustomerManagedKey: """Gets a customer-managed key configuration object for an account, specified by ID. 
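Stepping back to `CredentialsAPI.create` above: with both parameters now optional keyword arguments, call sites spell them out by name. A hedged usage sketch (the role ARN and configuration name are placeholders):

from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import (CreateCredentialAwsCredentials,
                                                 CreateCredentialStsRole)

a = AccountClient()  # account host and auth come from the environment
cred = a.credentials.create(
    credentials_name="prod-cross-account",
    aws_credentials=CreateCredentialAwsCredentials(
        sts_role=CreateCredentialStsRole(role_arn="arn:aws:iam::111122223333:role/placeholder")
    ),
)
print(cred.credentials_id)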
This operation @@ -1837,16 +2364,7 @@ def get(self, customer_managed_key_id: str) -> CustomerManagedKey: return CustomerManagedKey.from_dict(res) def list(self) -> Iterator[CustomerManagedKey]: - """Gets all customer-managed key configuration objects for an account. If the key is specified as a - workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's - notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. - If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the - workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane. - - **Important**: Customer-managed keys are supported only for some deployment types, subscription types, - and AWS regions. - - This operation is available only if your account is on the E2 version of the platform. + """Lists Databricks customer-managed key configurations for an account. :returns: Iterator over :class:`CustomerManagedKey` @@ -1869,9 +2387,9 @@ def __init__(self, api_client): def create( self, - network_name: str, *, gcp_network_info: Optional[GcpNetworkInfo] = None, + network_name: Optional[str] = None, security_group_ids: Optional[List[str]] = None, subnet_ids: Optional[List[str]] = None, vpc_endpoints: Optional[NetworkVpcEndpoints] = None, @@ -1880,9 +2398,9 @@ def create( """Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. - :param network_name: str - The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) + :param network_name: str (optional) + The human-readable name of the network configuration. :param security_group_ids: List[str] (optional) IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations. @@ -1891,8 +2409,8 @@ def create( network configurations. :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) :param vpc_id: str (optional) - The ID of the VPC associated with this network. VPC IDs can be used in multiple network - configurations. + The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple + networks. :returns: :class:`Network` """ @@ -1917,7 +2435,7 @@ def create( res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/networks", body=body, headers=headers) return Network.from_dict(res) - def delete(self, network_id: str): + def delete(self, network_id: str) -> Network: """Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. @@ -1926,14 +2444,15 @@ def delete(self, network_id: str): :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` """ headers = { "Accept": "application/json", } - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + res = self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + return Network.from_dict(res) def get(self, network_id: str) -> Network: """Gets a Databricks network configuration, which represents a cloud VPC and its resources. 
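The same keyword-only shape now applies to network registration via `NetworksAPI.create`. A minimal sketch against the same `AccountClient` (`a`), with placeholder AWS IDs:

net = a.networks.create(
    network_name="prod-us-west-2",
    vpc_id="vpc-0123456789abcdef0",
    subnet_ids=["subnet-aaaa1111", "subnet-bbbb2222"],
    security_group_ids=["sg-0123abcd"],
)
print(net.network_id)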
@@ -1952,9 +2471,7 @@ def get(self, network_id: str) -> Network: return Network.from_dict(res) def list(self) -> Iterator[Network]: - """Gets a list of all Databricks network configurations for an account, specified by ID. - - This operation is available only if your account is on the E2 version of the platform. + """Lists Databricks network configurations for an account. :returns: Iterator over :class:`Network` @@ -1976,48 +2493,28 @@ def __init__(self, api_client): def create( self, - private_access_settings_name: str, - region: str, *, allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, + private_access_settings_name: Optional[str] = None, public_access_enabled: Optional[bool] = None, + region: Optional[str] = None, ) -> PrivateAccessSettings: - """Creates a private access settings object, which specifies how your workspace is accessed over [AWS - PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object - referenced by ID in the workspace's `private_access_settings_id` property. - - You can share one private access settings with multiple workspaces in a single account. However, - private access settings are specific to AWS regions, so only workspaces in the same AWS region can use - a given private access settings object. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Creates a private access settings configuration, which represents network access restrictions for + workspace resources. Private access settings configure whether workspaces can be accessed from the + public internet or only from private endpoints. - :param private_access_settings_name: str - The human-readable name of the private access settings object. - :param region: str - The cloud region for workspaces associated with this private access settings object. :param allowed_vpc_endpoint_ids: List[str] (optional) - An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering - the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in - AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints - that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via public - internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html + The MWS API ID of VPC Endpoints that can access this workspace - only filled if privateAccessLevel + is ENDPOINT :param private_access_level: :class:`PrivateAccessLevel` (optional) + The level of isolation of a workspace attached to this settings object + :param private_access_settings_name: str (optional) + The friendly user-facing name of the Private Access Settings (i.e. jake's private access settings) :param public_access_enabled: bool (optional) - Determines if the workspace can be accessed over public internet. For fully private workspaces, you - can optionally specify `false`, but only if you implement both the front-end and the back-end - PrivateLink connections. 
Otherwise, specify `true`, which means that public access is enabled. + Whether or not public traffic can enter this workspace. True for hybrid workspaces, false otherwise. + :param region: str (optional) + The region in which this private access settings is valid :returns: :class:`PrivateAccessSettings` """ @@ -2042,42 +2539,29 @@ def create( ) return PrivateAccessSettings.from_dict(res) - def delete(self, private_access_settings_id: str): - """Deletes a private access settings object, which determines how your workspace is accessed over [AWS - PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + def delete(self, private_access_settings_id: str) -> PrivateAccessSettings: + """Deletes a Databricks private access settings configuration, both specified by ID. :param private_access_settings_id: str - Databricks Account API private access settings ID. - + :returns: :class:`PrivateAccessSettings` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", headers=headers, ) + return PrivateAccessSettings.from_dict(res) def get(self, private_access_settings_id: str) -> PrivateAccessSettings: - """Gets a private access settings object, which specifies how your workspace is accessed over [AWS - PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Gets a Databricks private access settings configuration, both specified by ID. :param private_access_settings_id: str - Databricks Account API private access settings ID. :returns: :class:`PrivateAccessSettings` """ @@ -2094,7 +2578,7 @@ def get(self, private_access_settings_id: str) -> PrivateAccessSettings: return PrivateAccessSettings.from_dict(res) def list(self) -> Iterator[PrivateAccessSettings]: - """Gets a list of all private access settings objects for an account, specified by ID. + """Lists Databricks private access settings for an account. :returns: Iterator over :class:`PrivateAccessSettings` @@ -2108,82 +2592,29 @@ def list(self) -> Iterator[PrivateAccessSettings]: return [PrivateAccessSettings.from_dict(v) for v in res] def replace( - self, - private_access_settings_id: str, - private_access_settings_name: str, - region: str, - *, - allowed_vpc_endpoint_ids: Optional[List[str]] = None, - private_access_level: Optional[PrivateAccessLevel] = None, - public_access_enabled: Optional[bool] = None, - ): - """Updates an existing private access settings object, which specifies how your workspace is accessed - over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object - referenced by ID in the workspace's `private_access_settings_id` property. - - This operation completely overwrites your existing private access settings object attached to your - workspaces. All workspaces attached to the private access settings are affected by any change. 
If - `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of - these changes might take several minutes to propagate to the workspace API. - - You can share one private access settings object with multiple workspaces in a single account. - However, private access settings are specific to AWS regions, so only workspaces in the same AWS - region can use a given private access settings object. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + self, private_access_settings_id: str, customer_facing_private_access_settings: PrivateAccessSettings + ) -> PrivateAccessSettings: + """Updates a Databricks private access settings configuration, both specified by ID. :param private_access_settings_id: str - Databricks Account API private access settings ID. - :param private_access_settings_name: str - The human-readable name of the private access settings object. - :param region: str - The cloud region for workspaces associated with this private access settings object. - :param allowed_vpc_endpoint_ids: List[str] (optional) - An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering - the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in - AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints - that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via public - internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html - :param private_access_level: :class:`PrivateAccessLevel` (optional) - :param public_access_enabled: bool (optional) - Determines if the workspace can be accessed over public internet. For fully private workspaces, you - can optionally specify `false`, but only if you implement both the front-end and the back-end - PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - + The ID in the MWS API of the Private Access Settings. 
+ :param customer_facing_private_access_settings: :class:`PrivateAccessSettings` + :returns: :class:`PrivateAccessSettings` """ - body = {} - if allowed_vpc_endpoint_ids is not None: - body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids] - if private_access_level is not None: - body["private_access_level"] = private_access_level.value - if private_access_settings_name is not None: - body["private_access_settings_name"] = private_access_settings_name - if public_access_enabled is not None: - body["public_access_enabled"] = public_access_enabled - if region is not None: - body["region"] = region + body = customer_facing_private_access_settings.as_dict() headers = { "Accept": "application/json", "Content-Type": "application/json", } - self._api.do( + res = self._api.do( "PUT", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", body=body, headers=headers, ) + return PrivateAccessSettings.from_dict(res) class StorageAPI: @@ -2195,20 +2626,13 @@ class StorageAPI: def __init__(self, api_client): self._api = api_client - def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration: - """Creates new storage configuration for an account, specified by ID. Uploads a storage configuration - object that represents the root AWS S3 bucket in your account. Databricks stores related workspace - assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the - required bucket policy. - - For information about how to create a new workspace with this API, see [Create a new workspace using - the Account API] - - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html + def create( + self, *, root_bucket_info: Optional[RootBucketInfo] = None, storage_configuration_name: Optional[str] = None + ) -> StorageConfiguration: + """Creates a Databricks storage configuration for an account. - :param storage_configuration_name: str - The human-readable name of the storage configuration. - :param root_bucket_info: :class:`RootBucketInfo` + :param root_bucket_info: :class:`RootBucketInfo` (optional) + :param storage_configuration_name: str (optional) :returns: :class:`StorageConfiguration` """ @@ -2227,31 +2651,30 @@ def create(self, storage_configuration_name: str, root_bucket_info: RootBucketIn ) return StorageConfiguration.from_dict(res) - def delete(self, storage_configuration_id: str): + def delete(self, storage_configuration_id: str) -> StorageConfiguration: """Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. :param storage_configuration_id: str - Databricks Account API storage configuration ID. - + :returns: :class:`StorageConfiguration` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", headers=headers, ) + return StorageConfiguration.from_dict(res) def get(self, storage_configuration_id: str) -> StorageConfiguration: """Gets a Databricks storage configuration for an account, both specified by ID. :param storage_configuration_id: str - Databricks Account API storage configuration ID. 
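Storage configuration creation follows the same keyword-only pattern; a sketch with a placeholder bucket name (again assuming `a` is an `AccountClient`):

storage = a.storage.create(
    storage_configuration_name="main-root-bucket",
    root_bucket_info=RootBucketInfo(bucket_name="my-company-dbx-root"),
)
print(storage.storage_configuration_id)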
:returns: :class:`StorageConfiguration` """ @@ -2268,7 +2691,7 @@ def get(self, storage_configuration_id: str) -> StorageConfiguration: return StorageConfiguration.from_dict(res) def list(self) -> Iterator[StorageConfiguration]: - """Gets a list of all Databricks storage configurations for your account, specified by ID. + """Lists Databricks storage configurations for an account, specified by ID. :returns: Iterator over :class:`StorageConfiguration` @@ -2290,11 +2713,11 @@ def __init__(self, api_client): def create( self, - vpc_endpoint_name: str, *, aws_vpc_endpoint_id: Optional[str] = None, gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, region: Optional[str] = None, + vpc_endpoint_name: Optional[str] = None, ) -> VpcEndpoint: """Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. @@ -2309,13 +2732,14 @@ def create( [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - :param vpc_endpoint_name: str - The human-readable name of the storage configuration. :param aws_vpc_endpoint_id: str (optional) The ID of the VPC endpoint object in AWS. :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional) + The cloud info of this vpc endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. + :param vpc_endpoint_name: str (optional) + The human-readable name of the storage configuration. :returns: :class:`VpcEndpoint` """ @@ -2338,29 +2762,23 @@ def create( ) return VpcEndpoint.from_dict(res) - def delete(self, vpc_endpoint_id: str): - """Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate - privately with Databricks over [AWS PrivateLink]. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink - [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + def delete(self, vpc_endpoint_id: str) -> VpcEndpoint: + """Deletes a Databricks VPC endpoint configuration. You cannot delete a VPC endpoint configuration that + is associated with any workspace. :param vpc_endpoint_id: str - Databricks VPC endpoint ID. - + :returns: :class:`VpcEndpoint` """ headers = { "Accept": "application/json", } - self._api.do( + res = self._api.do( "DELETE", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers ) + return VpcEndpoint.from_dict(res) def get(self, vpc_endpoint_id: str) -> VpcEndpoint: """Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate @@ -2385,11 +2803,7 @@ def get(self, vpc_endpoint_id: str) -> VpcEndpoint: return VpcEndpoint.from_dict(res) def list(self) -> Iterator[VpcEndpoint]: - """Gets a list of all VPC endpoints for an account, specified by ID. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + """Lists Databricks VPC endpoint configurations for an account. 
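And likewise for VPC endpoint registration; placeholder IDs throughout:

ep = a.vpc_endpoints.create(
    vpc_endpoint_name="backend-rest-endpoint",
    aws_vpc_endpoint_id="vpce-0fedcba9876543210",
    region="us-west-2",
)
print(ep.vpc_endpoint_id)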
:returns: Iterator over :class:`VpcEndpoint` @@ -2448,7 +2862,6 @@ def wait_get_workspace_running( def create( self, - workspace_name: str, *, aws_region: Optional[str] = None, cloud: Optional[str] = None, @@ -2466,81 +2879,71 @@ def create( private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + workspace_name: Optional[str] = None, ) -> Wait[Workspace]: - """Creates a new workspace. + """Creates a new workspace using a credential configuration and a storage configuration, an optional + network configuration (if using a customer-managed VPC), an optional managed services key + configuration (if using customer-managed keys for managed services), and an optional storage key + configuration (if using customer-managed keys for storage). The key configurations used for managed + services and storage encryption can be the same or different. + + Important: This operation is asynchronous. A response with HTTP status code 200 means the request has + been accepted and is in progress, but does not mean that the workspace deployed successfully and is + running. The initial workspace status is typically PROVISIONING. Use the workspace ID (workspace_id) + field in the response to identify the new workspace and make repeated GET requests with the workspace + ID and check its status. The workspace becomes available when the status changes to RUNNING. + + You can share one customer-managed VPC with multiple workspaces in a single account. It is not + required to create a new VPC for each workspace. However, you cannot reuse subnets or Security Groups + between workspaces. If you plan to share one VPC with multiple workspaces, make sure you size your VPC + and subnets accordingly. Because a Databricks Account API network configuration encapsulates this + information, you cannot reuse a Databricks Account API network configuration across workspaces. + + For information about how to create a new workspace with this API including error handling, see + [Create a new workspace using the Account API]. - **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request - has been accepted and is in progress, but does not mean that the workspace deployed successfully and - is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID - (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests - with the workspace ID and check its status. The workspace becomes available when the status changes to - `RUNNING`. + Important: Customer-managed VPCs, PrivateLink, and customer-managed keys are supported on a limited + set of deployment and subscription types. If you have questions about availability, contact your + Databricks representative. + + This operation is available only if your account is on the E2 version of the platform or on a select + custom plan that allows multiple workspaces per account. + + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - :param workspace_name: str - The workspace's human-readable name. :param aws_region: str (optional) - The AWS region of the workspace's data plane. :param cloud: str (optional) - The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to - `gcp`. + The cloud name. This field always has the value `gcp`. 
:param cloud_resource_container: :class:`CloudResourceContainer` (optional) :param credentials_id: str (optional) ID of the workspace's credential configuration object. :param custom_tags: Dict[str,str] (optional) - The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string - of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The - key can be of maximum length of 127 characters, and cannot be empty. :param deployment_name: str (optional) - The deployment name defines part of the subdomain for the workspace. The workspace URL for the web - application and REST APIs is `.cloud.databricks.com`. For example, if the - deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. - Hyphens are allowed. This property supports only the set of characters that are allowed in a - subdomain. - - To set this value, you must have a deployment name prefix. Contact your Databricks account team to - add an account deployment name prefix to your account. - - Workspace deployment names follow the account prefix and a hyphen. For example, if your account's - deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response - for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be - `acme-workspace-1.cloud.databricks.com`. - - You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment - name to only include the deployment prefix. For example, if your account's deployment prefix is - `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and - the workspace URL is `acme.cloud.databricks.com`. - - This value must be unique across all non-deleted deployments across all AWS regions. - - If a new workspace omits this property, the server generates a unique deployment name for you with - the pattern `dbc-xxxxxxxx-xxxx`. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) :param gke_config: :class:`GkeConfig` (optional) :param is_no_public_ip_enabled: bool (optional) - Whether no public IP is enabled for the workspace. + Whether No Public IP is enabled for the workspace :param location: str (optional) - The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. + The Google Cloud region of the workspace data plane in your Google account (for example, + `us-east4`). :param managed_services_customer_managed_key_id: str (optional) - The ID of the workspace's managed services encryption key configuration object. This is used to help - protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query - history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. + ID of the key configuration for encrypting managed services. :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) :param private_access_settings_id: str (optional) - ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be - specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), - back-end (data plane to control plane connection), or both connection types. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", + ID of the workspace's private access settings object. Only used for PrivateLink. 
You must specify + this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace connection), + back-end (data plane to control plane connection), or both connection types. Before configuring + PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html :param storage_configuration_id: str (optional) - The ID of the workspace's storage configuration object. + ID of the workspace's storage configuration object. :param storage_customer_managed_key_id: str (optional) - The ID of the workspace's storage encryption key configuration object. This is used to encrypt the - workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The - provided key configuration object property `use_cases` must contain `STORAGE`. + ID of the key configuration for encrypting workspace storage. + :param workspace_name: str (optional) + The human-readable name of the workspace. :returns: Long-running operation waiter for :class:`Workspace`. @@ -2597,7 +3000,6 @@ def create( def create_and_wait( self, - workspace_name: str, *, aws_region: Optional[str] = None, cloud: Optional[str] = None, @@ -2615,6 +3017,7 @@ def create_and_wait( private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + workspace_name: Optional[str] = None, timeout=timedelta(minutes=20), ) -> Workspace: return self.create( @@ -2637,42 +3040,34 @@ def create_and_wait( workspace_name=workspace_name, ).result(timeout=timeout) - def delete(self, workspace_id: int): - """Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. - However, it might take a few minutes for all workspaces resources to be deleted, depending on the size - and number of workspace resources. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + def delete(self, workspace_id: int) -> Workspace: + """Deletes a Databricks workspace, both specified by ID. :param workspace_id: int - Workspace ID. - + :returns: :class:`Workspace` """ headers = { "Accept": "application/json", } - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers) + res = self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers + ) + return Workspace.from_dict(res) def get(self, workspace_id: int) -> Workspace: """Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace - becomes available when the status changes to `RUNNING`. - - For information about how to create a new workspace with this API **including error handling**, see - [Create a new workspace using the Account API]. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + becomes available when the status changes to `RUNNING`. 
For information about how to create a new + workspace with this API **including error handling**, see [Create a new workspace using the Account + API]. [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html :param workspace_id: int - Workspace ID. :returns: :class:`Workspace` """ @@ -2687,10 +3082,7 @@ def get(self, workspace_id: int) -> Workspace: return Workspace.from_dict(res) def list(self) -> Iterator[Workspace]: - """Gets a list of all workspaces associated with an account, specified by ID. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. + """Lists Databricks workspaces for an account. :returns: Iterator over :class:`Workspace` @@ -2704,202 +3096,58 @@ def list(self) -> Iterator[Workspace]: return [Workspace.from_dict(v) for v in res] def update( - self, - workspace_id: int, - *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, + self, workspace_id: int, customer_facing_workspace: Workspace, *, update_mask: Optional[str] = None ) -> Wait[Workspace]: - """Updates a workspace configuration for either a running workspace or a failed workspace. The elements - that can be updated varies between these two use cases. - - ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace - deployment for some fields, but not all fields. For a failed workspace, this request supports updates - to the following fields only: - Credential configuration ID - Storage configuration ID - Network - configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a - failed workspace only, you can convert a workspace with Databricks-managed VPC to use a - customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC - to be a Databricks-managed VPC. You can update the network configuration for a failed or running - workspace to add PrivateLink support, though you must also add a private access settings object. - Key - configuration ID for managed services (control plane storage, such as notebook source and Databricks - SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID - for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use - customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running - state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID - for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update - the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both - types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink - support on a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - applied. - Network connectivity configuration ID to add serverless stable IP support. 
You can add or - update the network connectivity configuration ID to ensure the workspace uses the same set of stable - IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from - the workspace once attached, you can only switch to another one. - - After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` - requests with the workspace ID and check the workspace status. The workspace is successful if the - status changes to `RUNNING`. - - For information about how to create a new workspace with this API **including error handling**, see - [Create a new workspace using the Account API]. - - ### Update a running workspace You can update a Databricks workspace configuration for running - workspaces for some fields, but not all fields. For a running workspace, this request supports - updating the following fields only: - Credential configuration ID - Network configuration ID. Used - only if you already use a customer-managed VPC. You cannot convert a running workspace from a - Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this - API for a failed or running workspace to add support for PrivateLink, although you also need to add a - private access settings object. - Key configuration ID for managed services (control plane storage, - such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data - with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) - that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to - encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK - for managed services, adding this ID enables managed services encryption for new or updated data. - Existing managed services data that existed before adding the key remains not encrypted with the DEK - until it is modified. If the workspace already has customer-managed keys for managed services, this - request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key - configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this - only if the workspace does not already have a customer-managed key configuration for workspace - storage. - Private access settings ID to add PrivateLink support. You can add or update the private - access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of - connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on - a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be applied. - - Network connectivity configuration ID to add serverless stable IP support. You can add or update the - network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR - blocks to access your resources. You cannot remove a network connectivity configuration from the - workspace once attached, you can only switch to another one. - - **Important**: To update a running workspace, your workspace must have no running compute resources - that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose - clusters, job clusters, pools with running clusters, and Classic SQL warehouses. 
If you do not - terminate all cluster instances in the workspace before calling this API, the request will fail. - - ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace - configuration, make repeated `GET` requests with the workspace ID and check the workspace status and - the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes - `PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the - workspace status changes to `RUNNING`. Note that you can also check the workspace status in the - [Account Console]. However, you cannot use or create clusters for another 20 minutes after that status - change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create - or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could - cause other unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status - stays at status `RUNNING` and the VPC change happens immediately. A change to the storage - customer-managed key configuration ID might take a few minutes to update, so continue to check the - workspace until you observe that it has been updated. If the update fails, the workspace might revert - silently to its original configuration. After the workspace has been updated, you cannot use or create - clusters for another 20 minutes. If you create or use clusters before this time interval elapses, - clusters do not launch successfully, fail, or could cause other unexpected behavior. - - If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes - to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to - the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20 - minute wait. - - **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment - types and subscription types. If you have questions about availability, contact your Databricks - representative. - - This operation is available only if your account is on the E2 version of the platform or on a select - custom plan that allows multiple workspaces per account. - - [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html + """Updates a workspace. :param workspace_id: int - Workspace ID. - :param aws_region: str (optional) - The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available - only for updating failed workspaces. - :param credentials_id: str (optional) - ID of the workspace's credential configuration object. This parameter is available for updating both - failed and running workspaces. - :param custom_tags: Dict[str,str] (optional) - The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string - of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The - key can be of maximum length of 127 characters, and cannot be empty. - :param managed_services_customer_managed_key_id: str (optional) - The ID of the workspace's managed services encryption key configuration object. This parameter is - available only for updating failed workspaces. 
- :param network_connectivity_config_id: str (optional) - :param network_id: str (optional) - The ID of the workspace's network configuration object. Used only if you already use a - customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a - customer-managed VPC by updating the workspace to add a network configuration ID. - :param private_access_settings_id: str (optional) - The ID of the workspace's private access settings configuration object. This parameter is available - only for updating failed workspaces. - :param storage_configuration_id: str (optional) - The ID of the workspace's storage configuration object. This parameter is available only for - updating failed workspaces. - :param storage_customer_managed_key_id: str (optional) - The ID of the key configuration object for workspace storage. This parameter is available for - updating both failed and running workspaces. + A unique integer ID for the workspace. + :param customer_facing_workspace: :class:`Workspace` + :param update_mask: str (optional) + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as wildcards can lead to unintended results if + the API changes in the future. :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details.
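        Example (an illustrative sketch, not part of the generated documentation: assumes an
        ``AccountClient`` ``a``; the workspace ID, the tag values, and the ``Workspace`` import
        path are hypothetical)::

            from databricks.sdk import AccountClient
            from databricks.sdk.service.provisioning import Workspace

            a = AccountClient()
            # Only the fields named in `update_mask` are applied; other fields are left unchanged.
            updated = a.workspaces.update_and_wait(
                workspace_id=123456789,
                customer_facing_workspace=Workspace(custom_tags={"cost-center": "data-eng"}),
                update_mask="custom_tags",
            )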
""" - body = {} - if aws_region is not None: - body["aws_region"] = aws_region - if credentials_id is not None: - body["credentials_id"] = credentials_id - if custom_tags is not None: - body["custom_tags"] = custom_tags - if managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id - if network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = network_connectivity_config_id - if network_id is not None: - body["network_id"] = network_id - if private_access_settings_id is not None: - body["private_access_settings_id"] = private_access_settings_id - if storage_configuration_id is not None: - body["storage_configuration_id"] = storage_configuration_id - if storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = storage_customer_managed_key_id + body = customer_facing_workspace.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask headers = { "Accept": "application/json", "Content-Type": "application/json", } op_response = self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", body=body, headers=headers + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", + query=query, + body=body, + headers=headers, ) return Wait( - self.wait_get_workspace_running, response=UpdateResponse.from_dict(op_response), workspace_id=workspace_id + self.wait_get_workspace_running, + response=Workspace.from_dict(op_response), + workspace_id=op_response["workspace_id"], ) def update_and_wait( self, workspace_id: int, + customer_facing_workspace: Workspace, *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, + update_mask: Optional[str] = None, timeout=timedelta(minutes=20), ) -> Workspace: return self.update( - aws_region=aws_region, - credentials_id=credentials_id, - custom_tags=custom_tags, - managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, - network_connectivity_config_id=network_connectivity_config_id, - network_id=network_id, - private_access_settings_id=private_access_settings_id, - storage_configuration_id=storage_configuration_id, - storage_customer_managed_key_id=storage_customer_managed_key_id, - workspace_id=workspace_id, + customer_facing_workspace=customer_facing_workspace, update_mask=update_mask, workspace_id=workspace_id ).result(timeout=timeout) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 7370c1138..f707aadf7 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -3382,6 +3382,9 @@ class ServingEndpoint: task: Optional[str] = None """The task type of the serving endpoint.""" + usage_policy_id: Optional[str] = None + """The usage policy associated with serving endpoint.""" + def as_dict(self) -> dict: """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} @@ -3409,6 +3412,8 @@ def as_dict(self) -> dict: body["tags"] = [v.as_dict() for v in self.tags] if self.task is not None: body["task"] = self.task + 
if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: @@ -3438,6 +3443,8 @@ def as_shallow_dict(self) -> dict: body["tags"] = self.tags if self.task is not None: body["task"] = self.task + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body @classmethod @@ -3456,6 +3463,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ServingEndpoint: state=_from_dict(d, "state", EndpointState), tags=_repeated_dict(d, "tags", EndpointTag), task=d.get("task", None), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -3897,6 +3905,38 @@ def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, "routes", Route)) +@dataclass +class UpdateInferenceEndpointNotificationsResponse: + email_notifications: Optional[EmailNotifications] = None + + name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateInferenceEndpointNotificationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateInferenceEndpointNotificationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateInferenceEndpointNotificationsResponse: + """Deserializes the UpdateInferenceEndpointNotificationsResponse from a dictionary.""" + return cls( + email_notifications=_from_dict(d, "email_notifications", EmailNotifications), name=d.get("name", None) + ) + + @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -4482,6 +4522,7 @@ def query( self, name: str, *, + client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4495,11 +4536,15 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, + usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. + :param client_request_id: str (optional) + Optional user-provided request identifier that will be recorded in the inference table and the usage + tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4541,10 +4586,14 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. + :param usage_context: Dict[str,str] (optional) + Optional user-provided context that will be recorded in the usage tracking table. 
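        Example (an illustrative sketch: assumes a ``WorkspaceClient`` ``w`` and a hypothetical
        endpoint name; the request ID and context keys are arbitrary)::

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            response = w.serving_endpoints.query(
                name="my-endpoint",
                dataframe_records=[{"feature": 1.0}],
                # Recorded in the inference table and the usage tracking table.
                client_request_id="req-0001",
                # Recorded in the usage tracking table.
                usage_context={"team": "analytics", "use_case": "scoring"},
            )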
:returns: :class:`QueryEndpointResponse` """ body = {} + if client_request_id is not None: + body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4571,6 +4620,8 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature + if usage_context is not None: + body["usage_context"] = usage_context headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -4687,6 +4738,30 @@ def update_config_and_wait( traffic_config=traffic_config, ).result(timeout=timeout) + def update_notifications( + self, name: str, *, email_notifications: Optional[EmailNotifications] = None + ) -> UpdateInferenceEndpointNotificationsResponse: + """Updates the email and webhook notification settings for an endpoint. + + :param name: str + The name of the serving endpoint whose notifications are being updated. This field is required. + :param email_notifications: :class:`EmailNotifications` (optional) + The email notification settings to update. Specify email addresses to notify when endpoint state + changes occur. + + :returns: :class:`UpdateInferenceEndpointNotificationsResponse` + """ + body = {} + if email_notifications is not None: + body["email_notifications"] = email_notifications.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/serving-endpoints/{name}/notifications", body=body, headers=headers) + return UpdateInferenceEndpointNotificationsResponse.from_dict(res) + def update_permissions( self, serving_endpoint_id: str, @@ -4785,6 +4860,7 @@ def query( self, name: str, *, + client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4798,11 +4874,15 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, + usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. + :param client_request_id: str (optional) + Optional user-provided request identifier that will be recorded in the inference table and the usage + tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4844,10 +4924,14 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. + :param usage_context: Dict[str,str] (optional) + Optional user-provided context that will be recorded in the usage tracking table. 
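        Example for the ``update_notifications`` method defined earlier in this file (an
        illustrative sketch: assumes a ``WorkspaceClient`` ``w``; the endpoint name, the
        ``EmailNotifications`` field name, and the address are hypothetical)::

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service.serving import EmailNotifications

            w = WorkspaceClient()
            resp = w.serving_endpoints.update_notifications(
                name="my-endpoint",
                # `on_update_failure` is an assumed field name, used here for illustration only.
                email_notifications=EmailNotifications(on_update_failure=["ops@example.com"]),
            )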
:returns: :class:`QueryEndpointResponse` """ body = {} + if client_request_id is not None: + body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4874,6 +4958,8 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature + if usage_context is not None: + body["usage_context"] = usage_context data_plane_info = self._data_plane_info_query( name=name, ) diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 975860d8a..3004f17da 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -3587,8 +3587,32 @@ def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredWorkspace: @dataclass class MicrosoftTeamsConfig: + app_id: Optional[str] = None + """[Input-Only] App ID for Microsoft Teams App.""" + + app_id_set: Optional[bool] = None + """[Output-Only] Whether App ID is set.""" + + auth_secret: Optional[str] = None + """[Input-Only] Secret for Microsoft Teams App authentication.""" + + auth_secret_set: Optional[bool] = None + """[Output-Only] Whether secret is set.""" + + channel_url: Optional[str] = None + """[Input-Only] Channel URL for Microsoft Teams App.""" + + channel_url_set: Optional[bool] = None + """[Output-Only] Whether Channel URL is set.""" + + tenant_id: Optional[str] = None + """[Input-Only] Tenant ID for Microsoft Teams App.""" + + tenant_id_set: Optional[bool] = None + """[Output-Only] Whether Tenant ID is set.""" + url: Optional[str] = None - """[Input-Only] URL for Microsoft Teams.""" + """[Input-Only] URL for Microsoft Teams webhook.""" url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" @@ -3596,6 +3620,22 @@ class MicrosoftTeamsConfig: def as_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.app_id is not None: + body["app_id"] = self.app_id + if self.app_id_set is not None: + body["app_id_set"] = self.app_id_set + if self.auth_secret is not None: + body["auth_secret"] = self.auth_secret + if self.auth_secret_set is not None: + body["auth_secret_set"] = self.auth_secret_set + if self.channel_url is not None: + body["channel_url"] = self.channel_url + if self.channel_url_set is not None: + body["channel_url_set"] = self.channel_url_set + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.tenant_id_set is not None: + body["tenant_id_set"] = self.tenant_id_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -3605,6 +3645,22 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.app_id is not None: + body["app_id"] = self.app_id + if self.app_id_set is not None: + body["app_id_set"] = self.app_id_set + if self.auth_secret is not None: + body["auth_secret"] = self.auth_secret + if self.auth_secret_set is not None: + body["auth_secret_set"] = self.auth_secret_set + if self.channel_url is not None: + body["channel_url"] = self.channel_url + if self.channel_url_set is not None: + body["channel_url_set"] = self.channel_url_set + if self.tenant_id is not None: + body["tenant_id"] = self.tenant_id + if self.tenant_id_set is not None: + body["tenant_id_set"] = self.tenant_id_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ 
-3614,7 +3670,18 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> MicrosoftTeamsConfig: """Deserializes the MicrosoftTeamsConfig from a dictionary.""" - return cls(url=d.get("url", None), url_set=d.get("url_set", None)) + return cls( + app_id=d.get("app_id", None), + app_id_set=d.get("app_id_set", None), + auth_secret=d.get("auth_secret", None), + auth_secret_set=d.get("auth_secret_set", None), + channel_url=d.get("channel_url", None), + channel_url_set=d.get("channel_url_set", None), + tenant_id=d.get("tenant_id", None), + tenant_id_set=d.get("tenant_id_set", None), + url=d.get("url", None), + url_set=d.get("url_set", None), + ) @dataclass @@ -4617,6 +4684,18 @@ def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: @dataclass class SlackConfig: + channel_id: Optional[str] = None + """[Input-Only] Slack channel ID for notifications.""" + + channel_id_set: Optional[bool] = None + """[Output-Only] Whether channel ID is set.""" + + oauth_token: Optional[str] = None + """[Input-Only] OAuth token for Slack authentication.""" + + oauth_token_set: Optional[bool] = None + """[Output-Only] Whether OAuth token is set.""" + url: Optional[str] = None """[Input-Only] URL for Slack destination.""" @@ -4626,6 +4705,14 @@ class SlackConfig: def as_dict(self) -> dict: """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.channel_id is not None: + body["channel_id"] = self.channel_id + if self.channel_id_set is not None: + body["channel_id_set"] = self.channel_id_set + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token + if self.oauth_token_set is not None: + body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4635,6 +4722,14 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the SlackConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.channel_id is not None: + body["channel_id"] = self.channel_id + if self.channel_id_set is not None: + body["channel_id_set"] = self.channel_id_set + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token + if self.oauth_token_set is not None: + body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4644,7 +4739,14 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> SlackConfig: """Deserializes the SlackConfig from a dictionary.""" - return cls(url=d.get("url", None), url_set=d.get("url_set", None)) + return cls( + channel_id=d.get("channel_id", None), + channel_id_set=d.get("channel_id_set", None), + oauth_token=d.get("oauth_token", None), + oauth_token_set=d.get("oauth_token_set", None), + url=d.get("url", None), + url_set=d.get("url_set", None), + ) @dataclass diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py index 9f58d1caf..babfb1a09 100755 --- a/databricks/sdk/service/settingsv2.py +++ b/databricks/sdk/service/settingsv2.py @@ -4,9 +4,10 @@ import logging from dataclasses import dataclass +from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from ._internal import _from_dict, _repeated_dict +from ._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") @@ -14,6 +15,63 @@ # all definitions in this file are in alphabetical order +@dataclass +class 
AibiDashboardEmbeddingAccessPolicy: + access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_policy_type is not None: + body["access_policy_type"] = self.access_policy_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_policy_type is not None: + body["access_policy_type"] = self.access_policy_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicy: + """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary.""" + return cls( + access_policy_type=_enum(d, "access_policy_type", AibiDashboardEmbeddingAccessPolicyAccessPolicyType) + ) + + +class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): + + ALLOW_ALL_DOMAINS = "ALLOW_ALL_DOMAINS" + ALLOW_APPROVED_DOMAINS = "ALLOW_APPROVED_DOMAINS" + DENY_ALL_DOMAINS = "DENY_ALL_DOMAINS" + + +@dataclass +class AibiDashboardEmbeddingApprovedDomains: + approved_domains: Optional[List[str]] = None + + def as_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.approved_domains: + body["approved_domains"] = [v for v in self.approved_domains] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes.""" + body = {} + if self.approved_domains: + body["approved_domains"] = self.approved_domains + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomains: + """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary.""" + return cls(approved_domains=d.get("approved_domains", None)) + + @dataclass class BooleanMessage: value: Optional[bool] = None @@ -38,6 +96,232 @@ def from_dict(cls, d: Dict[str, Any]) -> BooleanMessage: return cls(value=d.get("value", None)) +@dataclass +class ClusterAutoRestartMessage: + can_toggle: Optional[bool] = None + + enabled: Optional[bool] = None + + enablement_details: Optional[ClusterAutoRestartMessageEnablementDetails] = None + + maintenance_window: Optional[ClusterAutoRestartMessageMaintenanceWindow] = None + + restart_even_if_no_updates_available: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.can_toggle is not None: + body["can_toggle"] = self.can_toggle + if self.enabled is not None: + body["enabled"] = self.enabled + if self.enablement_details: + body["enablement_details"] = self.enablement_details.as_dict() + if self.maintenance_window: + body["maintenance_window"] = self.maintenance_window.as_dict() + if self.restart_even_if_no_updates_available is not None: + body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.can_toggle is not None: + body["can_toggle"] = self.can_toggle + if self.enabled is not None: + body["enabled"] = self.enabled + if self.enablement_details: + 
body["enablement_details"] = self.enablement_details + if self.maintenance_window: + body["maintenance_window"] = self.maintenance_window + if self.restart_even_if_no_updates_available is not None: + body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessage: + """Deserializes the ClusterAutoRestartMessage from a dictionary.""" + return cls( + can_toggle=d.get("can_toggle", None), + enabled=d.get("enabled", None), + enablement_details=_from_dict(d, "enablement_details", ClusterAutoRestartMessageEnablementDetails), + maintenance_window=_from_dict(d, "maintenance_window", ClusterAutoRestartMessageMaintenanceWindow), + restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None), + ) + + +@dataclass +class ClusterAutoRestartMessageEnablementDetails: + """Contains an information about the enablement status judging (e.g. whether the enterprise tier is + enabled) This is only additional information that MUST NOT be used to decide whether the setting + is enabled or not. This is intended to use only for purposes like showing an error message to + the customer with the additional details. For example, using these details we can check why + exactly the feature is disabled for this customer.""" + + forced_for_compliance_mode: Optional[bool] = None + """The feature is force enabled if compliance mode is active""" + + unavailable_for_disabled_entitlement: Optional[bool] = None + """The feature is unavailable if the corresponding entitlement disabled (see + getShieldEntitlementEnable)""" + + unavailable_for_non_enterprise_tier: Optional[bool] = None + """The feature is unavailable if the customer doesn't have enterprise tier""" + + def as_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageEnablementDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.forced_for_compliance_mode is not None: + body["forced_for_compliance_mode"] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: + body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: + body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.forced_for_compliance_mode is not None: + body["forced_for_compliance_mode"] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: + body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: + body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageEnablementDetails: + """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary.""" + return cls( + forced_for_compliance_mode=d.get("forced_for_compliance_mode", None), + unavailable_for_disabled_entitlement=d.get("unavailable_for_disabled_entitlement", None), + unavailable_for_non_enterprise_tier=d.get("unavailable_for_non_enterprise_tier", None), + ) + + +@dataclass +class ClusterAutoRestartMessageMaintenanceWindow: + 
week_day_based_schedule: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule] = None + + def as_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.week_day_based_schedule: + body["week_day_based_schedule"] = self.week_day_based_schedule.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.week_day_based_schedule: + body["week_day_based_schedule"] = self.week_day_based_schedule + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindow: + """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary.""" + return cls( + week_day_based_schedule=_from_dict( + d, "week_day_based_schedule", ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule + ) + ) + + +class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): + + FRIDAY = "FRIDAY" + MONDAY = "MONDAY" + SATURDAY = "SATURDAY" + SUNDAY = "SUNDAY" + THURSDAY = "THURSDAY" + TUESDAY = "TUESDAY" + WEDNESDAY = "WEDNESDAY" + + +@dataclass +class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: + day_of_week: Optional[ClusterAutoRestartMessageMaintenanceWindowDayOfWeek] = None + + frequency: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency] = None + + window_start_time: Optional[ClusterAutoRestartMessageMaintenanceWindowWindowStartTime] = None + + def as_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.day_of_week is not None: + body["day_of_week"] = self.day_of_week.value + if self.frequency is not None: + body["frequency"] = self.frequency.value + if self.window_start_time: + body["window_start_time"] = self.window_start_time.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.day_of_week is not None: + body["day_of_week"] = self.day_of_week + if self.frequency is not None: + body["frequency"] = self.frequency + if self.window_start_time: + body["window_start_time"] = self.window_start_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: + """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary.""" + return cls( + day_of_week=_enum(d, "day_of_week", ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), + frequency=_enum(d, "frequency", ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), + window_start_time=_from_dict( + d, "window_start_time", ClusterAutoRestartMessageMaintenanceWindowWindowStartTime + ), + ) + + +class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): + + EVERY_WEEK = "EVERY_WEEK" + FIRST_AND_THIRD_OF_MONTH = "FIRST_AND_THIRD_OF_MONTH" + FIRST_OF_MONTH = "FIRST_OF_MONTH" + FOURTH_OF_MONTH = "FOURTH_OF_MONTH" + SECOND_AND_FOURTH_OF_MONTH = "SECOND_AND_FOURTH_OF_MONTH" + SECOND_OF_MONTH = "SECOND_OF_MONTH" + THIRD_OF_MONTH = "THIRD_OF_MONTH" + + +@dataclass +class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: + hours: Optional[int] = None + + 
minutes: Optional[int] = None + + def as_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.hours is not None: + body["hours"] = self.hours + if self.minutes is not None: + body["minutes"] = self.minutes + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes.""" + body = {} + if self.hours is not None: + body["hours"] = self.hours + if self.minutes is not None: + body["minutes"] = self.minutes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: + """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary.""" + return cls(hours=d.get("hours", None), minutes=d.get("minutes", None)) + + @dataclass class IntegerMessage: value: Optional[int] = None @@ -134,14 +418,95 @@ def from_dict(cls, d: Dict[str, Any]) -> ListWorkspaceSettingsMetadataResponse: ) +@dataclass +class PersonalComputeMessage: + value: Optional[PersonalComputeMessagePersonalComputeMessageEnum] = None + + def as_dict(self) -> dict: + """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: + body["value"] = self.value.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeMessage: + """Deserializes the PersonalComputeMessage from a dictionary.""" + return cls(value=_enum(d, "value", PersonalComputeMessagePersonalComputeMessageEnum)) + + +class PersonalComputeMessagePersonalComputeMessageEnum(Enum): + """ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing + all users to create single-machine compute resources. 
DELEGATE: Moves access control for the + Personal Compute default policy to individual workspaces and requires a workspace’s users or + groups to be added to the ACLs of that workspace’s Personal Compute default policy before they + will be able to create compute resources through that policy.""" + + DELEGATE = "DELEGATE" + ON = "ON" + + +@dataclass +class RestrictWorkspaceAdminsMessage: + status: RestrictWorkspaceAdminsMessageStatus + + def as_dict(self) -> dict: + """Serializes the RestrictWorkspaceAdminsMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsMessage: + """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary.""" + return cls(status=_enum(d, "status", RestrictWorkspaceAdminsMessageStatus)) + + +class RestrictWorkspaceAdminsMessageStatus(Enum): + + ALLOW_ALL = "ALLOW_ALL" + RESTRICT_TOKENS_AND_JOB_RUN_AS = "RESTRICT_TOKENS_AND_JOB_RUN_AS" + + @dataclass class Setting: + aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None + + aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None + + automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None + boolean_val: Optional[BooleanMessage] = None + effective_aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None + + effective_aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None + + effective_automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None + effective_boolean_val: Optional[BooleanMessage] = None effective_integer_val: Optional[IntegerMessage] = None + effective_personal_compute: Optional[PersonalComputeMessage] = None + + effective_restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None + effective_string_val: Optional[StringMessage] = None integer_val: Optional[IntegerMessage] = None @@ -149,23 +514,53 @@ class Setting: name: Optional[str] = None """Name of the setting.""" + personal_compute: Optional[PersonalComputeMessage] = None + + restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None + string_val: Optional[StringMessage] = None def as_dict(self) -> dict: """Serializes the Setting into a dictionary suitable for use as a JSON request body.""" body = {} + if self.aibi_dashboard_embedding_access_policy: + body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy.as_dict() + if self.aibi_dashboard_embedding_approved_domains: + body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains.as_dict() + if self.automatic_cluster_update_workspace: + body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace.as_dict() if self.boolean_val: body["boolean_val"] = self.boolean_val.as_dict() + if self.effective_aibi_dashboard_embedding_access_policy: + body["effective_aibi_dashboard_embedding_access_policy"] = ( + self.effective_aibi_dashboard_embedding_access_policy.as_dict() + ) + if self.effective_aibi_dashboard_embedding_approved_domains: + 
body["effective_aibi_dashboard_embedding_approved_domains"] = ( + self.effective_aibi_dashboard_embedding_approved_domains.as_dict() + ) + if self.effective_automatic_cluster_update_workspace: + body["effective_automatic_cluster_update_workspace"] = ( + self.effective_automatic_cluster_update_workspace.as_dict() + ) if self.effective_boolean_val: body["effective_boolean_val"] = self.effective_boolean_val.as_dict() if self.effective_integer_val: body["effective_integer_val"] = self.effective_integer_val.as_dict() + if self.effective_personal_compute: + body["effective_personal_compute"] = self.effective_personal_compute.as_dict() + if self.effective_restrict_workspace_admins: + body["effective_restrict_workspace_admins"] = self.effective_restrict_workspace_admins.as_dict() if self.effective_string_val: body["effective_string_val"] = self.effective_string_val.as_dict() if self.integer_val: body["integer_val"] = self.integer_val.as_dict() if self.name is not None: body["name"] = self.name + if self.personal_compute: + body["personal_compute"] = self.personal_compute.as_dict() + if self.restrict_workspace_admins: + body["restrict_workspace_admins"] = self.restrict_workspace_admins.as_dict() if self.string_val: body["string_val"] = self.string_val.as_dict() return body @@ -173,18 +568,42 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the Setting into a shallow dictionary of its immediate attributes.""" body = {} + if self.aibi_dashboard_embedding_access_policy: + body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy + if self.aibi_dashboard_embedding_approved_domains: + body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains + if self.automatic_cluster_update_workspace: + body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace if self.boolean_val: body["boolean_val"] = self.boolean_val + if self.effective_aibi_dashboard_embedding_access_policy: + body["effective_aibi_dashboard_embedding_access_policy"] = ( + self.effective_aibi_dashboard_embedding_access_policy + ) + if self.effective_aibi_dashboard_embedding_approved_domains: + body["effective_aibi_dashboard_embedding_approved_domains"] = ( + self.effective_aibi_dashboard_embedding_approved_domains + ) + if self.effective_automatic_cluster_update_workspace: + body["effective_automatic_cluster_update_workspace"] = self.effective_automatic_cluster_update_workspace if self.effective_boolean_val: body["effective_boolean_val"] = self.effective_boolean_val if self.effective_integer_val: body["effective_integer_val"] = self.effective_integer_val + if self.effective_personal_compute: + body["effective_personal_compute"] = self.effective_personal_compute + if self.effective_restrict_workspace_admins: + body["effective_restrict_workspace_admins"] = self.effective_restrict_workspace_admins if self.effective_string_val: body["effective_string_val"] = self.effective_string_val if self.integer_val: body["integer_val"] = self.integer_val if self.name is not None: body["name"] = self.name + if self.personal_compute: + body["personal_compute"] = self.personal_compute + if self.restrict_workspace_admins: + body["restrict_workspace_admins"] = self.restrict_workspace_admins if self.string_val: body["string_val"] = self.string_val return body @@ -193,12 +612,36 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Setting: """Deserializes the Setting from a dictionary.""" return cls( + 
aibi_dashboard_embedding_access_policy=_from_dict( + d, "aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy + ), + aibi_dashboard_embedding_approved_domains=_from_dict( + d, "aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains + ), + automatic_cluster_update_workspace=_from_dict( + d, "automatic_cluster_update_workspace", ClusterAutoRestartMessage + ), boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + effective_aibi_dashboard_embedding_access_policy=_from_dict( + d, "effective_aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy + ), + effective_aibi_dashboard_embedding_approved_domains=_from_dict( + d, "effective_aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains + ), + effective_automatic_cluster_update_workspace=_from_dict( + d, "effective_automatic_cluster_update_workspace", ClusterAutoRestartMessage + ), effective_boolean_val=_from_dict(d, "effective_boolean_val", BooleanMessage), effective_integer_val=_from_dict(d, "effective_integer_val", IntegerMessage), + effective_personal_compute=_from_dict(d, "effective_personal_compute", PersonalComputeMessage), + effective_restrict_workspace_admins=_from_dict( + d, "effective_restrict_workspace_admins", RestrictWorkspaceAdminsMessage + ), effective_string_val=_from_dict(d, "effective_string_val", StringMessage), integer_val=_from_dict(d, "integer_val", IntegerMessage), name=d.get("name", None), + personal_compute=_from_dict(d, "personal_compute", PersonalComputeMessage), + restrict_workspace_admins=_from_dict(d, "restrict_workspace_admins", RestrictWorkspaceAdminsMessage), string_val=_from_dict(d, "string_val", StringMessage), ) @@ -286,7 +729,8 @@ def __init__(self, api_client): self._api = api_client def get_public_account_setting(self, name: str) -> Setting: - """Get a setting value at account level + """Get a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for the list of + settings available via public APIs at account level. :param name: str @@ -303,9 +747,8 @@ def list_account_settings_metadata( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[SettingsMetadata]: - """List valid setting keys and metadata. These settings are available to referenced via [GET - /api/2.1/settings/{name}](#~1api~1account~1settingsv2~1getpublicaccountsetting) and [PATCH - /api/2.1/settings/{name}](#~1api~1account~1settingsv2~patchpublicaccountsetting) APIs + """List valid setting keys and metadata. These settings are available to be referenced via GET + :method:settingsv2/getpublicaccountsetting and PATCH :method:settingsv2/patchpublicaccountsetting APIs :param page_size: int (optional) The maximum number of settings to return. The service may return fewer than this value. If @@ -342,7 +785,8 @@ def list_account_settings_metadata( query["page_token"] = json["next_page_token"] def patch_public_account_setting(self, name: str, setting: Setting) -> Setting: - """Patch a setting value at account level + """Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for the list of + settings available via public APIs at account level.
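        Example (an illustrative sketch: assumes an ``AccountClient`` ``a`` whose settings
        accessor is named ``settings_v2``; the setting key is hypothetical and should come from
        :method:settingsv2/listaccountsettingsmetadata)::

            from databricks.sdk import AccountClient
            from databricks.sdk.service.settingsv2 import BooleanMessage, Setting

            a = AccountClient()
            patched = a.settings_v2.patch_public_account_setting(
                name="example_boolean_setting",
                setting=Setting(name="example_boolean_setting", boolean_val=BooleanMessage(value=True)),
            )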
:param name: str :param setting: :class:`Setting` @@ -368,7 +812,8 @@ def __init__(self, api_client): self._api = api_client def get_public_workspace_setting(self, name: str) -> Setting: - """Get a setting value at workspace level + """Get a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for the list + of settings available via public APIs. :param name: str @@ -385,9 +830,9 @@ def list_workspace_settings_metadata( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[SettingsMetadata]: - """List valid setting keys and metadata. These settings are available to referenced via [GET - /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~1getpublicworkspacesetting) and [PATCH - /api/2.1/settings/{name}](#~1api~1workspace~1settingsv2~patchpublicworkspacesetting) APIs + """List valid setting keys and metadata. These settings are available to be referenced via GET + :method:settingsv2/getpublicworkspacesetting and PATCH :method:settingsv2/patchpublicworkspacesetting + APIs :param page_size: int (optional) The maximum number of settings to return. The service may return fewer than this value. If @@ -422,7 +867,8 @@ def list_workspace_settings_metadata( query["page_token"] = json["next_page_token"] def patch_public_workspace_setting(self, name: str, setting: Setting) -> Setting: - """Patch a setting value at workspace level + """Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for the + list of settings available via public APIs at workspace level. :param name: str :param setting: :class:`Setting` diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 065da110d..42f1f505c 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1857,59 +1857,32 @@ def from_dict(cls, d: Dict[str, Any]) -> SecurablePropertiesKvPairs: @dataclass class Share: - comment: Optional[str] = None - """The comment of the share.""" - - display_name: Optional[str] = None - """The display name of the share. 
If defined, it will be shown in the UI.""" - id: Optional[str] = None name: Optional[str] = None - tags: Optional[List[catalog.TagKeyValue]] = None - """The tags of the share.""" - def as_dict(self) -> dict: """Serializes the Share into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.display_name is not None: - body["display_name"] = self.display_name if self.id is not None: body["id"] = self.id if self.name is not None: body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Share into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.display_name is not None: - body["display_name"] = self.display_name if self.id is not None: body["id"] = self.id if self.name is not None: body["name"] = self.name - if self.tags: - body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Share: """Deserializes the Share from a dictionary.""" - return cls( - comment=d.get("comment", None), - display_name=d.get("display_name", None), - id=d.get("id", None), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", catalog.TagKeyValue), - ) + return cls(id=d.get("id", None), name=d.get("name", None)) @dataclass @@ -2373,6 +2346,9 @@ def from_dict(cls, d: Dict[str, Any]) -> Table: class TableInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" + auxiliary_managed_location: Optional[str] = None + """Managed Delta Metadata location for foreign iceberg tables.""" + parent_storage_location: Optional[str] = None """Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of the parent UC entity's storage_location, following the same logic as getManagedEntityPath in @@ -2393,6 +2369,8 @@ class TableInternalAttributes: def as_dict(self) -> dict: """Serializes the TableInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} + if self.auxiliary_managed_location is not None: + body["auxiliary_managed_location"] = self.auxiliary_managed_location if self.parent_storage_location is not None: body["parent_storage_location"] = self.parent_storage_location if self.storage_location is not None: @@ -2406,6 +2384,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TableInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} + if self.auxiliary_managed_location is not None: + body["auxiliary_managed_location"] = self.auxiliary_managed_location if self.parent_storage_location is not None: body["parent_storage_location"] = self.parent_storage_location if self.storage_location is not None: @@ -2420,6 +2400,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TableInternalAttributes: """Deserializes the TableInternalAttributes from a dictionary.""" return cls( + auxiliary_managed_location=d.get("auxiliary_managed_location", None), parent_storage_location=d.get("parent_storage_location", None), storage_location=d.get("storage_location", None), type=_enum(d, "type", TableInternalAttributesSharedTableType), @@ -2432,8 +2413,10 @@ class TableInternalAttributesSharedTableType(Enum): DELTA_ICEBERG_TABLE = "DELTA_ICEBERG_TABLE" DIRECTORY_BASED_TABLE = "DIRECTORY_BASED_TABLE" FILE_BASED_TABLE = "FILE_BASED_TABLE" + 
FOREIGN_ICEBERG_TABLE = "FOREIGN_ICEBERG_TABLE" FOREIGN_TABLE = "FOREIGN_TABLE" MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + METRIC_VIEW = "METRIC_VIEW" STREAMING_TABLE = "STREAMING_TABLE" VIEW = "VIEW" @@ -3441,7 +3424,9 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}", query=query, headers=headers) return ShareInfo.from_dict(res) - def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: + def list_shares( + self, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ShareInfo]: """Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index b35039c98..0b4acc46e 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -646,6 +646,10 @@ class AlertV2: display_name: Optional[str] = None """The display name of the alert.""" + effective_run_as: Optional[AlertV2RunAs] = None + """The actual identity that will be used to execute the alert. This is an output-only field that + shows the resolved run-as identity after applying permissions and defaults.""" + evaluation: Optional[AlertV2Evaluation] = None id: Optional[str] = None @@ -664,10 +668,18 @@ class AlertV2: query_text: Optional[str] = None """Text of the query to be run.""" + run_as: Optional[AlertV2RunAs] = None + """Specifies the identity that will be used to run the alert. This field allows you to configure + alerts to run as a specific user or service principal. - For user identity: Set `user_name` to + the email of an active workspace user. Users can only set this to their own email. - For service + principal: Set `service_principal_name` to the application ID. Requires the + `servicePrincipal/user` role. If not specified, the alert will run as the request user.""" + run_as_user_name: Optional[str] = None """The run as username or application ID of service principal. On Create and Update, this field can be set to application ID of an active service principal. Setting this field requires the - servicePrincipal/user role.""" + servicePrincipal/user role. Deprecated: Use `run_as` field instead. 
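    For example, the replacement `run_as` field can be populated as follows (an illustrative
    sketch; the display name, query text, and application ID are placeholders)::

        from databricks.sdk.service.sql import AlertV2, AlertV2RunAs

        alert = AlertV2(
            display_name="daily-cost-alert",
            query_text="SELECT 1",
            # Requires the `servicePrincipal/user` role; the application ID is a placeholder.
            run_as=AlertV2RunAs(service_principal_name="my-sp-application-id"),
        )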
This field will be removed + in a future release.""" schedule: Optional[CronSchedule] = None @@ -688,6 +700,8 @@ def as_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name + if self.effective_run_as: + body["effective_run_as"] = self.effective_run_as.as_dict() if self.evaluation: body["evaluation"] = self.evaluation.as_dict() if self.id is not None: @@ -700,6 +714,8 @@ def as_dict(self) -> dict: body["parent_path"] = self.parent_path if self.query_text is not None: body["query_text"] = self.query_text + if self.run_as: + body["run_as"] = self.run_as.as_dict() if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.schedule: @@ -721,6 +737,8 @@ def as_shallow_dict(self) -> dict: body["custom_summary"] = self.custom_summary if self.display_name is not None: body["display_name"] = self.display_name + if self.effective_run_as: + body["effective_run_as"] = self.effective_run_as if self.evaluation: body["evaluation"] = self.evaluation if self.id is not None: @@ -733,6 +751,8 @@ def as_shallow_dict(self) -> dict: body["parent_path"] = self.parent_path if self.query_text is not None: body["query_text"] = self.query_text + if self.run_as: + body["run_as"] = self.run_as if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.schedule: @@ -751,12 +771,14 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2: custom_description=d.get("custom_description", None), custom_summary=d.get("custom_summary", None), display_name=d.get("display_name", None), + effective_run_as=_from_dict(d, "effective_run_as", AlertV2RunAs), evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), id=d.get("id", None), lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), owner_user_name=d.get("owner_user_name", None), parent_path=d.get("parent_path", None), query_text=d.get("query_text", None), + run_as=_from_dict(d, "run_as", AlertV2RunAs), run_as_user_name=d.get("run_as_user_name", None), schedule=_from_dict(d, "schedule", CronSchedule), update_time=d.get("update_time", None), @@ -770,7 +792,8 @@ class AlertV2Evaluation: """Operator used for comparison in alert evaluation.""" empty_result_state: Optional[AlertEvaluationState] = None - """Alert state if result is empty.""" + """Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because + `UNKNOWN` state is planned to be deprecated.""" last_evaluated_at: Optional[str] = None """Timestamp of the last evaluation.""" @@ -992,6 +1015,39 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandValue: ) +@dataclass +class AlertV2RunAs: + service_principal_name: Optional[str] = None + """Application ID of an active service principal. Setting this field requires the + `servicePrincipal/user` role.""" + + user_name: Optional[str] = None + """The email of an active workspace user. 
Users can only set this field to their own email.""" + + def as_dict(self) -> dict: + """Serializes the AlertV2RunAs into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AlertV2RunAs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AlertV2RunAs: + """Deserializes the AlertV2RunAs from a dictionary.""" + return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None)) + + @dataclass class AlertV2Subscription: destination_id: Optional[str] = None @@ -1024,9 +1080,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2Subscription: @dataclass class BaseChunkInfo: - """Describes metadata for a particular chunk, within a result set; this structure is used both - within a manifest, and when fetching individual chunk data or links.""" - byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" @@ -1630,8 +1683,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization class CreateWarehouseRequestWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2195,8 +2246,6 @@ class Disposition(Enum): class EditWarehouseRequestWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2285,6 +2334,7 @@ class EndpointHealth: """Deprecated. split into summary and details for security""" status: Optional[Status] = None + """Health status of the endpoint.""" summary: Optional[str] = None """A short summary of the health status in case of degraded/failed warehouses.""" @@ -2407,8 +2457,10 @@ class EndpointInfo: """ODBC parameters for the SQL warehouse""" spot_instance_policy: Optional[SpotInstancePolicy] = None + """Configuration for whether the endpoint should use spot instances.""" state: Optional[State] = None + """State of the endpoint.""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -2538,8 +2590,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: class EndpointInfoWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -2669,6 +2719,9 @@ class ExternalLink: which point a new `external_link` must be requested.""" external_link: Optional[str] = None + """A URL pointing to a chunk of result data, hosted by an external service, with a short expiration + time (<= 15 minutes). 
@@ -2669,6 +2719,9 @@ class ExternalLink:
         which point a new `external_link` must be requested."""

     external_link: Optional[str] = None
+    """A URL pointing to a chunk of result data, hosted by an external service, with a short expiration
+    time (<= 15 minutes). As this URL contains a temporary credential, it should be considered
+    sensitive and the client should not expose this URL in a log."""

     http_headers: Optional[Dict[str, str]] = None
     """HTTP headers that must be included with a GET request to the `external_link`. Each header is
@@ -2679,7 +2732,7 @@ class ExternalLink:
     next_chunk_index: Optional[int] = None
     """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no
     more chunks. The next chunk can be fetched with a
-    :method:statementexecution/getStatementResultChunkN request."""
+    :method:statementexecution/getstatementresultchunkn request."""

     next_chunk_internal_link: Optional[str] = None
     """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more
@@ -3021,8 +3074,10 @@ class GetWarehouseResponse:
     """ODBC parameters for the SQL warehouse"""

     spot_instance_policy: Optional[SpotInstancePolicy] = None
+    """Configures whether the endpoint should use spot instances."""

     state: Optional[State] = None
+    """State of the endpoint."""

     tags: Optional[EndpointTags] = None
     """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS
@@ -3031,6 +3086,8 @@ class GetWarehouseResponse:
     Supported values: - Number of tags < 45."""

     warehouse_type: Optional[GetWarehouseResponseWarehouseType] = None
+    """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
+    and also set the field `enable_serverless_compute` to `true`."""

     def as_dict(self) -> dict:
         """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body."""
@@ -3150,8 +3207,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWarehouseResponse:


 class GetWarehouseResponseWarehouseType(Enum):
-    """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
-    and also set the field `enable_serverless_compute` to `true`."""

     CLASSIC = "CLASSIC"
     PRO = "PRO"

@@ -3170,6 +3225,9 @@ class GetWorkspaceWarehouseConfigResponse:
     """Spark confs for external hive metastore configuration JSON serialized size must be less than
     <= 512K"""

+    enable_serverless_compute: Optional[bool] = None
+    """Enable Serverless compute for SQL warehouses"""
+
     enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None
     """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
     CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
@@ -3184,7 +3242,8 @@ class GetWorkspaceWarehouseConfigResponse:
     """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage"""

     instance_profile_arn: Optional[str] = None
-    """AWS Only: Instance profile used to pass IAM role to the cluster"""
+    """AWS Only: The instance profile used to pass an IAM role to the SQL warehouses.
This + configuration is also applied to the workspace's serverless compute for notebooks and jobs.""" security_policy: Optional[GetWorkspaceWarehouseConfigResponseSecurityPolicy] = None """Security policy for warehouses""" @@ -3201,6 +3260,8 @@ def as_dict(self) -> dict: body["config_param"] = self.config_param.as_dict() if self.data_access_config: body["data_access_config"] = [v.as_dict() for v in self.data_access_config] + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute if self.enabled_warehouse_types: body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] if self.global_param: @@ -3224,6 +3285,8 @@ def as_shallow_dict(self) -> dict: body["config_param"] = self.config_param if self.data_access_config: body["data_access_config"] = self.data_access_config + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute if self.enabled_warehouse_types: body["enabled_warehouse_types"] = self.enabled_warehouse_types if self.global_param: @@ -3245,6 +3308,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse: channel=_from_dict(d, "channel", Channel), config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), + enable_serverless_compute=d.get("enable_serverless_compute", None), enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair), global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), google_service_account=d.get("google_service_account", None), @@ -3255,7 +3319,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse: class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum): - """Security policy for warehouses""" + """Security policy to be used for warehouses""" DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" NONE = "NONE" @@ -3859,13 +3923,18 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponseAlert: @dataclass class ListAlertsV2Response: + alerts: Optional[List[AlertV2]] = None + next_page_token: Optional[str] = None results: Optional[List[AlertV2]] = None + """Deprecated. 
Use `alerts` instead."""

     def as_dict(self) -> dict:
         """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.alerts:
+            body["alerts"] = [v.as_dict() for v in self.alerts]
         if self.next_page_token is not None:
             body["next_page_token"] = self.next_page_token
         if self.results:
@@ -3875,6 +3944,8 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAlertsV2Response into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.alerts:
+            body["alerts"] = self.alerts
         if self.next_page_token is not None:
             body["next_page_token"] = self.next_page_token
         if self.results:
@@ -3884,7 +3955,11 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response:
         """Deserializes the ListAlertsV2Response from a dictionary."""
-        return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", AlertV2))
+        return cls(
+            alerts=_repeated_dict(d, "alerts", AlertV2),
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", AlertV2),
+        )


 class ListOrder(Enum):
@@ -4197,12 +4272,18 @@ def from_dict(cls, d: Dict[str, Any]) -> ListVisualizationsForQueryResponse:

 @dataclass
 class ListWarehousesResponse:
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted,
+    there are no subsequent pages."""
+
     warehouses: Optional[List[EndpointInfo]] = None
     """A list of warehouses and their configurations."""

     def as_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         if self.warehouses:
             body["warehouses"] = [v.as_dict() for v in self.warehouses]
         return body
@@ -4210,6 +4291,8 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         if self.warehouses:
             body["warehouses"] = self.warehouses
         return body
@@ -4217,7 +4300,9 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
-        return cls(warehouses=_repeated_dict(d, "warehouses", EndpointInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None), warehouses=_repeated_dict(d, "warehouses", EndpointInfo)
+        )


 @dataclass
@@ -4723,6 +4808,9 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryFilter:

 @dataclass
 class QueryInfo:
+    cache_query_id: Optional[str] = None
+    """The ID of the cached query if this result was retrieved from the cache"""
+
     channel_used: Optional[ChannelInfo] = None
     """SQL Warehouse channel information at the time of query execution"""

@@ -4808,6 +4896,8 @@ class QueryInfo:
     def as_dict(self) -> dict:
         """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.cache_query_id is not None:
+            body["cache_query_id"] = self.cache_query_id
         if self.channel_used:
             body["channel_used"] = self.channel_used.as_dict()
         if self.client_application is not None:
@@ -4861,6 +4951,8 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryInfo into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.cache_query_id is not None:
+            body["cache_query_id"] = self.cache_query_id
         if self.channel_used:
             body["channel_used"] = self.channel_used
         if self.client_application is not None:
@@ -4915,6 +5007,7 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
         return cls(
+            cache_query_id=d.get("cache_query_id", None),
             channel_used=_from_dict(d, "channel_used", ChannelInfo),
             client_application=d.get("client_application", None),
             duration=d.get("duration", None),
@@ -5476,6 +5569,12 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreResponse:

 @dataclass
 class ResultData:
+    """Contains the result data of a single chunk when using `INLINE` disposition. When using
+    `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide URLs to the
+    result data in cloud storage. Exactly one of these alternatives is used. (While the
+    `external_links` array prepares the API to return multiple links in a single response, currently
+    only a single link is returned.)"""
+
     byte_count: Optional[int] = None
     """The number of bytes in the result chunk. This field is not available when using `INLINE`
     disposition."""
@@ -5492,7 +5591,7 @@ class ResultData:
     next_chunk_index: Optional[int] = None
     """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no
     more chunks. The next chunk can be fetched with a
-    :method:statementexecution/getStatementResultChunkN request."""
+    :method:statementexecution/getstatementresultchunkn request."""

     next_chunk_internal_link: Optional[str] = None
     """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more
@@ -5780,7 +5879,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SetResponse:

 class SetWorkspaceWarehouseConfigRequestSecurityPolicy(Enum):
-    """Security policy for warehouses"""
+    """Security policy to be used for warehouses"""

     DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL"
     NONE = "NONE"
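The `ResultData` docstring above describes one chunk of a result. A hedged sketch of walking every chunk of an `INLINE` result by following `next_chunk_index`, assuming `w` is a `databricks.sdk.WorkspaceClient`, `resp` came from `w.statement_execution.execute_statement(...)`, and `handle_row` is a placeholder:

chunk = resp.result
while chunk is not None:
    for row in chunk.data_array or []:
        handle_row(row)  # placeholder for application logic
    if chunk.next_chunk_index is None:
        break  # no more chunks
    chunk = w.statement_execution.get_statement_result_chunk_n(resp.statement_id, chunk.next_chunk_index)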
@@ -5806,7 +5905,20 @@ def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigResponse:

 class SpotInstancePolicy(Enum):
-    """Configurations whether the warehouse should use spot instances."""
+    """EndpointSpotInstancePolicy configures whether the endpoint should use spot instances.
+
+    The breakdown of how the EndpointSpotInstancePolicy converts to per cloud configurations is:
+
+    +-------+--------------------------------------+--------------------------------+
+    | Cloud | COST_OPTIMIZED                       | RELIABILITY_OPTIMIZED          |
+    +-------+--------------------------------------+--------------------------------+
+    | AWS   | On Demand Driver with Spot Executors | On Demand Driver and Executors |
+    | AZURE | On Demand Driver and Executors       | On Demand Driver and Executors |
+    +-------+--------------------------------------+--------------------------------+
+
+    While including "spot" in the enum name may limit the future extensibility of this field
+    because it limits this enum to denoting "spot or not", this is the field that PM recommends
+    after discussion with customers per SC-48783."""

     COST_OPTIMIZED = "COST_OPTIMIZED"
     POLICY_UNSPECIFIED = "POLICY_UNSPECIFIED"
@@ -5832,7 +5944,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StartWarehouseResponse:

 class State(Enum):
-    """State of the warehouse"""
+    """State of a warehouse."""

     DELETED = "DELETED"
     DELETING = "DELETING"
@@ -5936,11 +6048,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StatementResponse:

 class StatementState(Enum):
-    """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running -
-    `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
-    failed; reason for failure described in accomanying error message - `CANCELED`: user canceled;
-    can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`:
-    execution successful, and statement closed; result no longer available for fetch"""

     CANCELED = "CANCELED"
     CLOSED = "CLOSED"
@@ -5957,6 +6064,11 @@ class StatementStatus:
     error: Optional[ServiceError] = None

     state: Optional[StatementState] = None
+    """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running -
+    `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
+    failed; reason for failure described in accompanying error message - `CANCELED`: user canceled;
+    can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`:
+    execution successful, and statement closed; result no longer available for fetch"""

     def as_dict(self) -> dict:
         """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body."""
@@ -5983,12 +6095,10 @@ def from_dict(cls, d: Dict[str, Any]) -> StatementStatus:

 class Status(Enum):
-    """Health status of the warehouse."""

     DEGRADED = "DEGRADED"
     FAILED = "FAILED"
     HEALTHY = "HEALTHY"
-    STATUS_UNSPECIFIED = "STATUS_UNSPECIFIED"


 @dataclass
@@ -6139,20 +6249,35 @@ def from_dict(cls, d: Dict[str, Any]) -> TerminationReason:

 class TerminationReasonCode(Enum):
-    """status code indicating why the cluster was terminated"""
+    """The status code indicating why the cluster was terminated"""

     ABUSE_DETECTED = "ABUSE_DETECTED"
+    ACCESS_TOKEN_FAILURE = "ACCESS_TOKEN_FAILURE"
+    ALLOCATION_TIMEOUT = "ALLOCATION_TIMEOUT"
+    ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = "ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY"
+    ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS"
+    ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS"
+    ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS"
+    ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_READY_CLUSTERS"
+    ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = 
"ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS" + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS" ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" + AWS_INACCESSIBLE_KMS_KEY_FAILURE = "AWS_INACCESSIBLE_KMS_KEY_FAILURE" + AWS_INSTANCE_PROFILE_UPDATE_FAILURE = "AWS_INSTANCE_PROFILE_UPDATE_FAILURE" AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + AWS_INVALID_KEY_PAIR = "AWS_INVALID_KEY_PAIR" + AWS_INVALID_KMS_KEY_STATE = "AWS_INVALID_KMS_KEY_STATE" AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" + AWS_RESOURCE_QUOTA_EXCEEDED = "AWS_RESOURCE_QUOTA_EXCEEDED" AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = "AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE" AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING" @@ -6161,65 +6286,148 @@ class TerminationReasonCode(Enum): AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG" + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED" + BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE" + CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE" + CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED" CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = "CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED" CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG" CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG" CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" + CLUSTER_OPERATION_THROTTLED = "CLUSTER_OPERATION_THROTTLED" + CLUSTER_OPERATION_TIMEOUT = "CLUSTER_OPERATION_TIMEOUT" COMMUNICATION_LOST = "COMMUNICATION_LOST" CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = "CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG" DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" + DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" + DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" + DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = 
"DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" + DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" + DRIVER_EVICTION = "DRIVER_EVICTION" + DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" + DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" + DRIVER_OUT_OF_DISK = "DRIVER_OUT_OF_DISK" + DRIVER_OUT_OF_MEMORY = "DRIVER_OUT_OF_MEMORY" + DRIVER_POD_CREATION_FAILURE = "DRIVER_POD_CREATION_FAILURE" + DRIVER_UNEXPECTED_FAILURE = "DRIVER_UNEXPECTED_FAILURE" + DRIVER_UNHEALTHY = "DRIVER_UNHEALTHY" DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = "DYNAMIC_SPARK_CONF_SIZE_EXCEEDED" + EOS_SPARK_IMAGE = "EOS_SPARK_IMAGE" EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" + EXECUTOR_POD_UNSCHEDULED = "EXECUTOR_POD_UNSCHEDULED" + GCP_API_RATE_QUOTA_EXCEEDED = "GCP_API_RATE_QUOTA_EXCEEDED" + GCP_DENIED_BY_ORG_POLICY = "GCP_DENIED_BY_ORG_POLICY" + GCP_FORBIDDEN = "GCP_FORBIDDEN" + GCP_IAM_TIMEOUT = "GCP_IAM_TIMEOUT" + GCP_INACCESSIBLE_KMS_KEY_FAILURE = "GCP_INACCESSIBLE_KMS_KEY_FAILURE" + GCP_INSUFFICIENT_CAPACITY = "GCP_INSUFFICIENT_CAPACITY" + GCP_IP_SPACE_EXHAUSTED = "GCP_IP_SPACE_EXHAUSTED" + GCP_KMS_KEY_PERMISSION_DENIED = "GCP_KMS_KEY_PERMISSION_DENIED" + GCP_NOT_FOUND = "GCP_NOT_FOUND" GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" + GCP_RESOURCE_QUOTA_EXCEEDED = "GCP_RESOURCE_QUOTA_EXCEEDED" + GCP_SERVICE_ACCOUNT_ACCESS_DENIED = "GCP_SERVICE_ACCOUNT_ACCESS_DENIED" GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" + GCP_SERVICE_ACCOUNT_NOT_FOUND = "GCP_SERVICE_ACCOUNT_NOT_FOUND" + GCP_SUBNET_NOT_READY = "GCP_SUBNET_NOT_READY" + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = "GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED" + GKE_BASED_CLUSTER_TERMINATION = "GKE_BASED_CLUSTER_TERMINATION" GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" INACTIVITY = "INACTIVITY" + INIT_CONTAINER_NOT_FINISHED = "INIT_CONTAINER_NOT_FINISHED" INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" + INSTANCE_POOL_MAX_CAPACITY_REACHED = "INSTANCE_POOL_MAX_CAPACITY_REACHED" + INSTANCE_POOL_NOT_FOUND = "INSTANCE_POOL_NOT_FOUND" INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = "INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG" + INTERNAL_CAPACITY_FAILURE = "INTERNAL_CAPACITY_FAILURE" INTERNAL_ERROR = "INTERNAL_ERROR" INVALID_ARGUMENT = "INVALID_ARGUMENT" + INVALID_AWS_PARAMETER = "INVALID_AWS_PARAMETER" + INVALID_INSTANCE_PLACEMENT_PROTOCOL = "INVALID_INSTANCE_PLACEMENT_PROTOCOL" INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" + INVALID_WORKER_IMAGE_FAILURE = "INVALID_WORKER_IMAGE_FAILURE" + IN_PENALTY_BOX = "IN_PENALTY_BOX" IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" JOB_FINISHED = "JOB_FINISHED" K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT" + MAINTENANCE_MODE = "MAINTENANCE_MODE" METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" + NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT" + NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE" + NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE" + 
NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE"
+    NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE"
+    NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE"
+    NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE"
     NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE"
     NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE"
+    NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S"
+    NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG"
+    NO_MATCHED_K8S = "NO_MATCHED_K8S"
+    NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG"
     NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE"
     NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE"
+    POD_ASSIGNMENT_FAILURE = "POD_ASSIGNMENT_FAILURE"
+    POD_SCHEDULING_FAILURE = "POD_SCHEDULING_FAILURE"
     REQUEST_REJECTED = "REQUEST_REJECTED"
     REQUEST_THROTTLED = "REQUEST_THROTTLED"
+    RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED"
+    SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE"
+    SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED"
     SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR"
+    SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION"
     SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION"
     SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE"
+    SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED"
     SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES"
     SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD"
     SPARK_ERROR = "SPARK_ERROR"
     SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE"
+    SPARK_IMAGE_DOWNLOAD_THROTTLED = "SPARK_IMAGE_DOWNLOAD_THROTTLED"
+    SPARK_IMAGE_NOT_FOUND = "SPARK_IMAGE_NOT_FOUND"
     SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE"
     SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION"
+    SSH_BOOTSTRAP_FAILURE = "SSH_BOOTSTRAP_FAILURE"
     STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE"
+    STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = "STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG"
+    STORAGE_DOWNLOAD_FAILURE_SLOW = "STORAGE_DOWNLOAD_FAILURE_SLOW"
+    STORAGE_DOWNLOAD_FAILURE_THROTTLED = "STORAGE_DOWNLOAD_FAILURE_THROTTLED"
     STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE"
     SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE"
     TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE"
     TRIAL_EXPIRED = "TRIAL_EXPIRED"
     UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE"
+    UNEXPECTED_POD_RECREATION = "UNEXPECTED_POD_RECREATION"
     UNKNOWN = "UNKNOWN"
     UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE"
     UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE"
+    USAGE_POLICY_ENTITLEMENT_DENIED = "USAGE_POLICY_ENTITLEMENT_DENIED"
+    USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION"
     USER_REQUEST = "USER_REQUEST"
     WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE"
     WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR"
     WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR"
+    WORKSPACE_UPDATE = "WORKSPACE_UPDATE"


 class TerminationReasonType(Enum):
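Because this patch adds many members to `TerminationReasonCode`, a small defensive sketch for mapping raw API strings so values introduced after a given SDK build fall back to the existing `UNKNOWN` member instead of raising:

from databricks.sdk.service.sql import TerminationReasonCode

def to_termination_code(raw: str) -> TerminationReasonCode:
    try:
        return TerminationReasonCode(raw)  # Enum lookup by value
    except ValueError:
        return TerminationReasonCode.UNKNOWN  # tolerate codes newer than this SDK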
dict: """Serializes the WarehouseTypePair into a dictionary suitable for use as a JSON request body.""" @@ -6967,7 +7177,6 @@ def from_dict(cls, d: Dict[str, Any]) -> WarehouseTypePair: class WarehouseTypePairWarehouseType(Enum): - """Warehouse type: `PRO` or `CLASSIC`.""" CLASSIC = "CLASSIC" PRO = "PRO" @@ -8744,17 +8953,17 @@ class StatementExecutionAPI: the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement. - In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 - seconds; if the statement execution finishes within this time, the result data is returned directly in the - response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns - with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call - doesn't wait for the statement to finish but returns directly with a statement ID. The status of the - statement execution can be polled by issuing :method:statementexecution/getStatement with the statement - ID. Once the execution has succeeded, this call also returns the result and metadata in the response. - - Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10 - seconds; if the statement execution finishes within this time, the result data is returned directly in the - response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can - be used to fetch status and results in the same way as in the asynchronous mode. + In summary: - **Synchronous mode** (`wait_timeout=30s` and `on_wait_timeout=CANCEL`): The call waits up to + 30 seconds; if the statement execution finishes within this time, the result data is returned directly in + the response. If the execution takes longer than 30 seconds, the execution is canceled and the call + returns with a `CANCELED` state. - **Asynchronous mode** (`wait_timeout=0s` and `on_wait_timeout` is + ignored): The call doesn't wait for the statement to finish but returns directly with a statement ID. The + status of the statement execution can be polled by issuing :method:statementexecution/getStatement with + the statement ID. Once the execution has succeeded, this call also returns the result and metadata in the + response. - **[Default] Hybrid mode** (`wait_timeout=10s` and `on_wait_timeout=CONTINUE`): The call waits + for up to 10 seconds; if the statement execution finishes within this time, the result data is returned + directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The + statement ID can be used to fetch status and results in the same way as in the asynchronous mode. Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest @@ -8809,7 +9018,7 @@ def __init__(self, api_client): def cancel_execution(self, statement_id: str): """Requests that an executing statement be canceled. Callers must poll for status to see the terminal - state. + state. Cancel response is empty; receiving response indicates successful receipt. 
@@ -8809,7 +9018,7 @@ def __init__(self, api_client):

     def cancel_execution(self, statement_id: str):
         """Requests that an executing statement be canceled. Callers must poll for status to see the terminal
-        state.
+        state. The cancel response is empty; receiving a response indicates successful receipt of the request.

        :param statement_id: str
          The statement ID is returned upon successfully submitting a SQL statement, and is a required
@@ -8837,7 +9046,52 @@ def execute_statement(
        schema: Optional[str] = None,
        wait_timeout: Optional[str] = None,
    ) -> StatementResponse:
-        """Execute a SQL statement
+        """Execute a SQL statement and optionally await its results for a specified time.
+
+        **Use case: small result sets with INLINE + JSON_ARRAY**
+
+        For flows that generate small and predictable result sets (<= 25 MiB), `INLINE` responses of
+        `JSON_ARRAY` result data are typically the simplest way to execute and fetch result data.
+
+        **Use case: large result sets with EXTERNAL_LINKS**
+
+        Using `EXTERNAL_LINKS` to fetch result data allows you to fetch large result sets efficiently. The
+        main differences from using `INLINE` disposition are that the result data is accessed with URLs, and
+        that there are 3 supported formats: `JSON_ARRAY`, `ARROW_STREAM` and `CSV` compared to only
+        `JSON_ARRAY` with `INLINE`.
+
+        **URLs**
+
+        External links point to data stored within your workspace's internal storage, in the form of a URL.
+        The URLs are valid for only a short period, <= 15 minutes. Alongside each `external_link` is an
+        expiration field indicating the time at which the URL is no longer valid. In `EXTERNAL_LINKS` mode,
+        chunks can be resolved and fetched multiple times and in parallel.
+
+        ----
+
+        ### **Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+        `EXTERNAL_LINKS` disposition.**
+
+        When you use the `EXTERNAL_LINKS` disposition, a short-lived URL is generated, which can be used to
+        download the results directly from cloud storage. As a short-lived credential is embedded in this
+        URL, you should protect the URL.
+
+        Because these URLs are already generated with embedded temporary credentials, you must not set an
+        `Authorization` header in the download requests.
+
+        The `EXTERNAL_LINKS` disposition can be disabled upon request by creating a support case.
+
+        See also [Security best practices].
+
+        ----
+
+        StatementResponse contains `statement_id` and `status`; other fields might be absent or present
+        depending on context. If the SQL warehouse fails to execute the provided statement, a 200 response is
+        returned with `status.state` set to `FAILED` (in contrast to a failure when accepting the request,
+        which results in a non-200 response). Details of the error can be found at `status.error` in case of
+        execution failures.
+
+        [Security best practices]: https://docs.databricks.com/sql/admin/sql-execution-tutorial.html#security-best-practices

        :param statement: str
          The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. The
@@ -8851,12 +9105,32 @@ def execute_statement(
          representations and might not match the final size in the requested `format`. If the result was
          truncated due to the byte limit, then `truncated` in the response is set to `true`. When using
          `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not
-          explcitly set.
+          explicitly set.
        :param catalog: str (optional)
          Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.

          [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
        :param disposition: :class:`Disposition` (optional)
+          The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`.
+ + Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` + format, in a series of chunks. If a given statement produces a result set with a size larger than 25 + MiB, that statement execution is aborted, and no result set will be available. + + **NOTE** Byte limits are computed based upon internal representations of the result set data, and + might not match the sizes visible in JSON responses. + + Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: + URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition + allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The + resulting links have two important properties: + + 1. They point to resources _external_ to the Databricks compute; therefore any associated + authentication information (typically a personal access token, OAuth token, or similar) _must be + removed_ when fetching from these links. + + 2. These are URLs with a specific expiration, indicated in the response. The behavior when + attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -8907,13 +9181,13 @@ def execute_statement( For example, the following statement contains two parameters, `my_name` and `my_date`: - SELECT * FROM my_table WHERE name = :my_name AND date = :my_date + ``` SELECT * FROM my_table WHERE name = :my_name AND date = :my_date ``` The parameters can be passed in the request body as follows: - { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", + ` { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": - "2020-01-01", "type": "DATE" } ] } + "2020-01-01", "type": "DATE" } ] } ` Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. @@ -8974,15 +9248,16 @@ def execute_statement( "Content-Type": "application/json", } - res = self._api.do("POST", "/api/2.0/sql/statements/", body=body, headers=headers) + res = self._api.do("POST", "/api/2.0/sql/statements", body=body, headers=headers) return StatementResponse.from_dict(res) def get_statement(self, statement_id: str) -> StatementResponse: - """This request can be used to poll for the statement's status. When the `status.state` field is - `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the - statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the - state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and - further calls will receive an HTTP 404 response. + """This request can be used to poll for the statement's status. StatementResponse contains `statement_id` + and `status`; other fields might be absent or present depending on context. When the `status.state` + field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. + When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 + with the state set. After at least 12 hours in terminal state, the statement is removed from the + warehouse and further calls will receive an HTTP 404 response. 
        **NOTE** This call currently might take up to 5 seconds to get the latest status and result.

        :param statement_id: str
          The statement ID is returned upon successfully submitting a SQL statement, and is a required
@@ -9007,6 +9282,7 @@ def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> R
         can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
         element described in the :method:statementexecution/getStatement request, and similarly includes the
         `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
+        Depending on `disposition`, the response returns chunks of data either inline, or as links.

        :param statement_id: str
          The statement ID is returned upon successfully submitting a SQL statement, and is a required
@@ -9117,8 +9393,7 @@ def create(
          The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
          is automatically stopped.

-          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
-          non-serverless warehouses - 0 indicates no autostop.
+          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.

          Defaults to 120 mins
        :param channel: :class:`Channel` (optional)
@@ -9159,12 +9434,15 @@ def create(

          Supported values: - Must be unique within an org. - Must be less than 100 characters.
        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+          Configures whether the endpoint should use spot instances.
        :param tags: :class:`EndpointTags` (optional)
          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
          associated with this SQL warehouse.

          Supported values: - Number of tags < 45.
        :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
+          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
+          also set the field `enable_serverless_compute` to `true`.

        :returns:
          Long-running operation waiter for :class:`GetWarehouseResponse`.
@@ -9303,7 +9581,7 @@ def edit(

          Defaults to false.
        :param enable_serverless_compute: bool (optional)
-          Configures whether the warehouse should use serverless compute.
+          Configures whether the warehouse should use serverless compute
        :param instance_profile_arn: str (optional)
          Deprecated. Instance profile used to pass IAM role to the cluster
        :param max_num_clusters: int (optional)
@@ -9325,12 +9603,15 @@ def edit(

          Supported values: - Must be unique within an org. - Must be less than 100 characters.
        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+          Configures whether the endpoint should use spot instances.
        :param tags: :class:`EndpointTags` (optional)
          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
          associated with this SQL warehouse.

          Supported values: - Number of tags < 45.
        :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
+          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
+          also set the field `enable_serverless_compute` to `true`.

        :returns:
          Long-running operation waiter for :class:`GetWarehouseResponse`.
@@ -9470,26 +9751,45 @@ def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse:
        res = self._api.do("GET", "/api/2.0/sql/config/warehouses", headers=headers)
        return GetWorkspaceWarehouseConfigResponse.from_dict(res)

-    def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]:
-        """Lists all SQL warehouses that a user has manager permissions on.
+    def list(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None, run_as_user_id: Optional[int] = None
+    ) -> Iterator[EndpointInfo]:
+        """Lists all SQL warehouses that a user has access to.
+
+        :param page_size: int (optional)
+          The max number of warehouses to return.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListWarehouses` call. Provide this to retrieve the
+          subsequent page; otherwise the first page will be retrieved.
+
+          When paginating, all other parameters provided to `ListWarehouses` must match the call that provided
+          the page token.
        :param run_as_user_id: int (optional)
-          Service Principal which will be used to fetch the list of warehouses. If not specified, the user
-          from the session header is used.
+          Service Principal which will be used to fetch the list of endpoints. If not specified, SQL Gateway
+          will use the user from the session header.

        :returns: Iterator over :class:`EndpointInfo`
        """

        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
        if run_as_user_id is not None:
            query["run_as_user_id"] = run_as_user_id
        headers = {
            "Accept": "application/json",
        }

-        json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers)
-        parsed = ListWarehousesResponse.from_dict(json).warehouses
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers)
+            if "warehouses" in json:
+                for v in json["warehouses"]:
+                    yield EndpointInfo.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]

    def set_permissions(
        self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None
@@ -9520,6 +9820,7 @@ def set_workspace_warehouse_config(
        channel: Optional[Channel] = None,
        config_param: Optional[RepeatedEndpointConfPairs] = None,
        data_access_config: Optional[List[EndpointConfPair]] = None,
+        enable_serverless_compute: Optional[bool] = None,
        enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None,
        global_param: Optional[RepeatedEndpointConfPairs] = None,
        google_service_account: Optional[str] = None,
@@ -9535,6 +9836,8 @@ def set_workspace_warehouse_config(
          Deprecated: Use sql_configuration_parameters
        :param data_access_config: List[:class:`EndpointConfPair`] (optional)
          Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K
+        :param enable_serverless_compute: bool (optional)
+          Enable Serverless compute for SQL warehouses
        :param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional)
          List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
          CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
@@ -9546,7 +9849,8 @@ def set_workspace_warehouse_config(
        :param google_service_account: str (optional)
          GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage
        :param instance_profile_arn: str (optional)
-          AWS Only: Instance profile used to pass IAM role to the cluster
+          AWS Only: The instance profile used to pass an IAM role to the SQL warehouses. This configuration is
+          also applied to the workspace's serverless compute for notebooks and jobs.
        :param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional)
          Security policy for warehouses
        :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
@@ -9561,6 +9865,8 @@ def set_workspace_warehouse_config(
            body["config_param"] = config_param.as_dict()
        if data_access_config is not None:
            body["data_access_config"] = [v.as_dict() for v in data_access_config]
+        if enable_serverless_compute is not None:
+            body["enable_serverless_compute"] = enable_serverless_compute
        if enabled_warehouse_types is not None:
            body["enabled_warehouse_types"] = [v.as_dict() for v in enabled_warehouse_types]
        if global_param is not None:
diff --git a/databricks/sdk/service/tags.py b/databricks/sdk/service/tags.py
index 0d851907e..c6a109240 100755
--- a/databricks/sdk/service/tags.py
+++ b/databricks/sdk/service/tags.py
@@ -17,6 +17,7 @@
 @dataclass
 class ListTagAssignmentsResponse:
     next_page_token: Optional[str] = None
+    """Pagination token to request the next page of tag assignments"""

     tag_assignments: Optional[List[TagAssignment]] = None

@@ -82,12 +83,16 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTagPoliciesResponse:
 @dataclass
 class TagAssignment:
     entity_type: str
+    """The type of entity to which the tag is assigned. Allowed value is dashboards"""

     entity_id: str
+    """The identifier of the entity to which the tag is assigned"""

     tag_key: str
+    """The key of the tag. The characters `,` `.` `:` `/` `-` `=` and leading/trailing spaces are not allowed"""

     tag_value: Optional[str] = None
+    """The value of the tag"""

     def as_dict(self) -> dict:
         """Serializes the TagAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -130,21 +135,31 @@ def from_dict(cls, d: Dict[str, Any]) -> TagAssignment:
 class TagPolicy:
     tag_key: str

+    create_time: Optional[str] = None
+    """Timestamp when the tag policy was created"""
+
     description: Optional[str] = None

     id: Optional[str] = None

+    update_time: Optional[str] = None
+    """Timestamp when the tag policy was last updated"""
+
     values: Optional[List[Value]] = None

     def as_dict(self) -> dict:
         """Serializes the TagPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
         if self.description is not None:
             body["description"] = self.description
         if self.id is not None:
             body["id"] = self.id
         if self.tag_key is not None:
             body["tag_key"] = self.tag_key
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         if self.values:
             body["values"] = [v.as_dict() for v in self.values]
         return body
@@ -152,12 +167,16 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the TagPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
         if self.description is not None:
             body["description"] = self.description
         if self.id is not None:
             body["id"] = self.id
         if self.tag_key is not None:
             body["tag_key"] = self.tag_key
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         if self.values:
             body["values"] = self.values
         return body
@@ -166,9 +185,11 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> TagPolicy:
         """Deserializes the TagPolicy from a dictionary."""
         return cls(
+            create_time=d.get("create_time", None),
             description=d.get("description", None),
             id=d.get("id", None),
             tag_key=d.get("tag_key", None),
+            update_time=d.get("update_time", None),
             values=_repeated_dict(d, "values", Value),
         )
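A hedged sketch of the assignment methods changed below, assuming the workspace client exposes them as `w.tag_assignments` (the accessor name is an assumption) and the dashboard ID is a placeholder; note that this patch moves the list endpoint to a `/tags` suffix:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.tags import TagAssignment

w = WorkspaceClient()
# Change the value of an existing governed-tag assignment on a dashboard.
w.tag_assignments.update_tag_assignment(
    entity_type="dashboards",
    entity_id="dashboard-id-placeholder",
    tag_key="cost_center",
    tag_assignment=TagAssignment(
        entity_type="dashboards",
        entity_id="dashboard-id-placeholder",
        tag_key="cost_center",
        tag_value="eng",
    ),
    update_mask="tag_value",  # single-field mask; comma-separate for multiple fields
)
for a in w.tag_assignments.list_tag_assignments(entity_type="dashboards", entity_id="dashboard-id-placeholder"):
    print(a.tag_key, a.tag_value)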
@@ -223,8 +244,11 @@ def delete_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str):
        """Delete a tag assignment

        :param entity_type: str
+          The type of entity to which the tag is assigned. Allowed value is dashboards
        :param entity_id: str
+          The identifier of the entity to which the tag is assigned
        :param tag_key: str
+          The key of the tag. The characters `,` `.` `:` `/` `-` `=` and leading/trailing spaces are not allowed


        """
@@ -241,8 +265,11 @@ def get_tag_assignment(self, entity_type: str, entity_id: str, tag_key: str) ->
        """Get a tag assignment

        :param entity_type: str
+          The type of entity to which the tag is assigned. Allowed value is dashboards
        :param entity_id: str
+          The identifier of the entity to which the tag is assigned
        :param tag_key: str
+          The key of the tag. The characters `,` `.` `:` `/` `-` `=` and leading/trailing spaces are not allowed

        :returns: :class:`TagAssignment`
        """
@@ -262,9 +289,13 @@ def list_tag_assignments(
        """List the tag assignments for an entity

        :param entity_type: str
+          The type of entity to which the tag is assigned. Allowed value is dashboards
        :param entity_id: str
+          The identifier of the entity to which the tag is assigned
        :param page_size: int (optional)
+          Optional. Maximum number of tag assignments to return in a single page
        :param page_token: str (optional)
+          Pagination token to go to the next page of tag assignments. Requests first page if absent.

        :returns: Iterator over :class:`TagAssignment`
        """
@@ -280,7 +311,7 @@ def list_tag_assignments(
        while True:
            json = self._api.do(
-                "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}", query=query, headers=headers
+                "GET", f"/api/2.0/entity-tag-assignments/{entity_type}/{entity_id}/tags", query=query, headers=headers
            )
            if "tag_assignments" in json:
                for v in json["tag_assignments"]:
@@ -295,8 +326,11 @@ def update_tag_assignment(
        """Update a tag assignment

        :param entity_type: str
+          The type of entity to which the tag is assigned. Allowed value is dashboards
        :param entity_id: str
+          The identifier of the entity to which the tag is assigned
        :param tag_key: str
+          The key of the tag. The characters `,` `.` `:` `/` `-` `=` and leading/trailing spaces are not allowed
        :param tag_assignment: :class:`TagAssignment`
        :param update_mask: str
          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -331,13 +365,16 @@


 class TagPoliciesAPI:
-    """The Tag Policy API allows you to manage tag policies in Databricks."""
+    """The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions for tag
+    policies can be managed using the [Account Access Control Proxy API].
+
+    [Account Access Control Proxy API]: https://docs.databricks.com/api/workspace/accountaccesscontrolproxy"""

    def __init__(self, api_client):
        self._api = api_client

    def create_tag_policy(self, tag_policy: TagPolicy) -> TagPolicy:
-        """Creates a new tag policy.
+        """Creates a new tag policy, making the associated tag key governed.

        :param tag_policy: :class:`TagPolicy`

@@ -353,7 +390,7 @@ def create_tag_policy(self, tag_policy: TagPolicy) -> TagPolicy:
        return TagPolicy.from_dict(res)

    def delete_tag_policy(self, tag_key: str):
-        """Deletes a tag policy by its key.
+        """Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned.
:param tag_key: str @@ -367,7 +404,7 @@ def delete_tag_policy(self, tag_key: str): self._api.do("DELETE", f"/api/2.1/tag-policies/{tag_key}", headers=headers) def get_tag_policy(self, tag_key: str) -> TagPolicy: - """Gets a single tag policy by its key. + """Gets a single tag policy by its associated governed tag's key. :param tag_key: str @@ -384,10 +421,14 @@ def get_tag_policy(self, tag_key: str) -> TagPolicy: def list_tag_policies( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[TagPolicy]: - """Lists all tag policies in the account. + """Lists the tag policies for all governed tags in the account. :param page_size: int (optional) + The maximum number of results to return in this request. Fewer results may be returned than + requested. If unspecified or set to 0, this defaults to 1000. The maximum value is 1000; values + above 1000 will be coerced down to 1000. :param page_token: str (optional) + An optional page token received from a previous list tag policies call. :returns: Iterator over :class:`TagPolicy` """ @@ -411,7 +452,7 @@ def list_tag_policies( query["page_token"] = json["next_page_token"] def update_tag_policy(self, tag_key: str, tag_policy: TagPolicy, update_mask: str) -> TagPolicy: - """Updates an existing tag policy. + """Updates an existing tag policy for a single governed tag. :param tag_key: str :param tag_policy: :class:`TagPolicy` diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 237b2e088..887450c80 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -195,6 +195,8 @@ class DeltaSyncVectorIndexSpecRequest: effective_budget_policy_id: Optional[str] = None """The budget policy id applied to the vector search index""" + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -221,6 +223,8 @@ def as_dict(self) -> dict: body["columns_to_sync"] = [v for v in self.columns_to_sync] if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -240,6 +244,8 @@ def as_shallow_dict(self) -> dict: body["columns_to_sync"] = self.columns_to_sync if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -258,6 +264,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: return cls( columns_to_sync=d.get("columns_to_sync", None), effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -271,6 +278,8 @@ class 
DeltaSyncVectorIndexSpecResponse: effective_budget_policy_id: Optional[str] = None """The budget policy id applied to the vector search index""" + effective_usage_policy_id: Optional[str] = None + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" @@ -298,6 +307,8 @@ def as_dict(self) -> dict: body = {} if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] if self.embedding_vector_columns: @@ -317,6 +328,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.effective_budget_policy_id is not None: body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_usage_policy_id is not None: + body["effective_usage_policy_id"] = self.effective_usage_policy_id if self.embedding_source_columns: body["embedding_source_columns"] = self.embedding_source_columns if self.embedding_vector_columns: @@ -336,6 +349,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" return cls( effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), embedding_writeback_table=d.get("embedding_writeback_table", None), @@ -393,7 +407,10 @@ def from_dict(cls, d: Dict[str, Any]) -> DirectAccessVectorIndexSpec: @dataclass class EmbeddingSourceColumn: embedding_model_endpoint_name: Optional[str] = None - """Name of the embedding model endpoint""" + """Name of the embedding model endpoint, used by default for both ingestion and querying.""" + + model_endpoint_name_for_query: Optional[str] = None + """Name of the embedding model endpoint which, if specified, is used for querying (not ingestion).""" name: Optional[str] = None """Name of the column""" @@ -403,6 +420,8 @@ def as_dict(self) -> dict: body = {} if self.embedding_model_endpoint_name is not None: body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name + if self.model_endpoint_name_for_query is not None: + body["model_endpoint_name_for_query"] = self.model_endpoint_name_for_query if self.name is not None: body["name"] = self.name return body @@ -412,6 +431,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.embedding_model_endpoint_name is not None: body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name + if self.model_endpoint_name_for_query is not None: + body["model_endpoint_name_for_query"] = self.model_endpoint_name_for_query if self.name is not None: body["name"] = self.name return body @@ -419,7 +440,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingSourceColumn: """Deserializes the EmbeddingSourceColumn from a dictionary.""" - return cls(embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), name=d.get("name", None)) + return cls( + embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), + model_endpoint_name_for_query=d.get("model_endpoint_name_for_query", 
None),
+            name=d.get("name", None),
+        )


 @dataclass
@@ -1108,6 +1133,24 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsResponse:
         return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), name=d.get("name", None))


+@dataclass
+class UpdateVectorIndexUsagePolicyResponse:
+    def as_dict(self) -> dict:
+        """Serializes the UpdateVectorIndexUsagePolicyResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVectorIndexUsagePolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UpdateVectorIndexUsagePolicyResponse:
+        """Deserializes the UpdateVectorIndexUsagePolicyResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class UpsertDataResult:
     failed_primary_keys: Optional[List[str]] = None
@@ -1417,7 +1460,12 @@ def wait_get_endpoint_vector_search_endpoint_online(
         raise TimeoutError(f"timed out after {timeout}: {status_message}")

     def create_endpoint(
-        self, name: str, endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None
+        self,
+        name: str,
+        endpoint_type: EndpointType,
+        *,
+        budget_policy_id: Optional[str] = None,
+        usage_policy_id: Optional[str] = None,
     ) -> Wait[EndpointInfo]:
         """Create a new endpoint.

        :param name: str
          Name of the vector search endpoint
        :param endpoint_type: :class:`EndpointType`
          Type of endpoint
        :param budget_policy_id: str (optional)
          The budget policy id to be applied
+        :param usage_policy_id: str (optional)
+          The usage policy id to be applied once the migration to usage policies is complete

        :returns:
          Long-running operation waiter for :class:`EndpointInfo`.
@@ -1439,6 +1489,8 @@ def create_endpoint(
            body["endpoint_type"] = endpoint_type.value
        if name is not None:
            body["name"] = name
+        if usage_policy_id is not None:
+            body["usage_policy_id"] = usage_policy_id
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
@@ -1457,11 +1509,12 @@ def create_endpoint_and_wait(
        endpoint_type: EndpointType,
        *,
        budget_policy_id: Optional[str] = None,
+        usage_policy_id: Optional[str] = None,
        timeout=timedelta(minutes=20),
    ) -> EndpointInfo:
-        return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result(
-            timeout=timeout
-        )
+        return self.create_endpoint(
+            budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name, usage_policy_id=usage_policy_id
+        ).result(timeout=timeout)

    def delete_endpoint(self, endpoint_name: str):
        """Delete a vector search endpoint.
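A hedged sketch of the new `usage_policy_id` parameter on endpoint creation, assuming `w` is a `databricks.sdk.WorkspaceClient` and both the endpoint name and policy ID are placeholders; `create_endpoint_and_wait` blocks on the waiter shown above until the endpoint is online:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.vectorsearch import EndpointType

w = WorkspaceClient()
endpoint = w.vector_search_endpoints.create_endpoint_and_wait(
    name="docs-index-endpoint",
    endpoint_type=EndpointType.STANDARD,
    usage_policy_id="usage-policy-id-placeholder",  # new in this patch
)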
@@ -1527,7 +1580,8 @@ def update_endpoint_budget_policy(
         :param endpoint_name: str
           Name of the vector search endpoint
         :param budget_policy_id: str
-          The budget policy id to be applied
+          The budget policy id to be applied. TODO: remove this parameter once the migration to usage
+          policies is complete.
 
         :returns: :class:`PatchEndpointBudgetPolicyResponse`
         """
@@ -1864,6 +1918,22 @@ def sync_index(self, index_name: str):
 
         self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers)
 
+    def update_index_budget_policy(self, index_name: str) -> UpdateVectorIndexUsagePolicyResponse:
+        """Update the usage policy of an index.
+
+        :param index_name: str
+          Name of the vector search index
+
+        :returns: :class:`UpdateVectorIndexUsagePolicyResponse`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/vector-search/indexes/{index_name}/usage-policy", headers=headers)
+        return UpdateVectorIndexUsagePolicyResponse.from_dict(res)
+
     def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse:
         """Handles the upserting of data into a specified vector index.

From 50a1cea0dcab7a96f967b68dbaccf740f57674ad Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 29 Sep 2025 09:43:41 +0000
Subject: [PATCH 3/3] update convert

---
 .codegen.json                        |   1 -
 databricks/sdk/__init__.py           |  48 +++++------
 databricks/sdk/service/catalog.py    |  30 ++-----
 databricks/sdk/service/cleanrooms.py |   6 +-
 databricks/sdk/service/compute.py    |  27 ++-----
 databricks/sdk/service/files.py      |  72 -----------------
 databricks/sdk/service/iam.py        |  36 ---------
 databricks/sdk/service/jobs.py       | 114 +--------------------------
 databricks/sdk/service/oauth2.py     |  18 -----
 databricks/sdk/service/pipelines.py  |   4 +-
 databricks/sdk/service/serving.py    |  24 +-----
 databricks/sdk/service/settings.py   |  72 -----------------
 databricks/sdk/service/sharing.py    |  20 +----
 databricks/sdk/service/sql.py        |  32 ++------
 databricks/sdk/service/workspace.py  |  90 ---------------------
 15 files changed, 53 insertions(+), 541 deletions(-)

diff --git a/.codegen.json b/.codegen.json
index 0cf0321a5..def4cb595 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -15,7 +15,6 @@
   ],
   "post_generate": [
     "make fmt",
-    "pytest -m 'not integration' --cov=databricks --cov-report html tests",
     "pip install ."
] } diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 4d42ed22a..dcabb2e4a 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -299,6 +299,7 @@ def __init__( self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) + self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._functions = pkg_catalog.FunctionsAPI(self._api_client) self._genie = pkg_dashboards.GenieAPI(self._api_client) self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client) @@ -383,9 +384,8 @@ def __init__( self._workspace = WorkspaceExt(self._api_client) self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client) self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client) - self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) - self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client) + self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) self._groups = pkg_iam.GroupsAPI(self._api_client) self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._users = pkg_iam.UsersAPI(self._api_client) @@ -617,6 +617,11 @@ def files(self) -> pkg_files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" return self._files + @property + def forecasting(self) -> pkg_ml.ForecastingAPI: + """The Forecasting API allows you to create and get serverless forecasting experiments.""" + return self._forecasting + @property def functions(self) -> pkg_catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" @@ -1002,21 +1007,16 @@ def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf - @property - def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: - """APIs to manage workspace level settings.""" - return self._workspace_settings_v2 - - @property - def forecasting(self) -> pkg_ml.ForecastingAPI: - """The Forecasting API allows you to create and get serverless forecasting experiments.""" - return self._forecasting - @property def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API: """These APIs are used to manage identities and the workspace access of these identities in .""" return self._workspace_iam_v2 + @property + def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: + """APIs to manage workspace level settings.""" + return self._workspace_settings_v2 + @property def groups(self) -> pkg_iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" @@ -1109,11 +1109,13 @@ def __init__( self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client) self._billable_usage = pkg_billing.BillableUsageAPI(self._api_client) self._budget_policy = pkg_billing.BudgetPolicyAPI(self._api_client) + self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._credentials = pkg_provisioning.CredentialsAPI(self._api_client) self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client) self._encryption_keys = 
pkg_provisioning.EncryptionKeysAPI(self._api_client) self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client) self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client) + self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client) self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client) self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client) @@ -1138,8 +1140,6 @@ def __init__( self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client) self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client) self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client) - self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) - self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._groups = pkg_iam.AccountGroupsAPI(self._api_client) self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) self._users = pkg_iam.AccountUsersAPI(self._api_client) @@ -1167,6 +1167,11 @@ def budget_policy(self) -> pkg_billing.BudgetPolicyAPI: """A service serves REST API about Budget policies.""" return self._budget_policy + @property + def budgets(self) -> pkg_billing.BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + @property def credentials(self) -> pkg_provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -1192,6 +1197,11 @@ def groups_v2(self) -> pkg_iam.AccountGroupsV2API: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups_v2 + @property + def iam_v2(self) -> pkg_iamv2.AccountIamV2API: + """These APIs are used to manage identities and the workspace access of these identities in .""" + return self._iam_v2 + @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.""" @@ -1312,16 +1322,6 @@ def workspaces(self) -> pkg_provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces - @property - def iam_v2(self) -> pkg_iamv2.AccountIamV2API: - """These APIs are used to manage identities and the workspace access of these identities in .""" - return self._iam_v2 - - @property - def budgets(self) -> pkg_billing.BudgetsAPI: - """These APIs manage budget configurations for this account.""" - return self._budgets - @property def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 0686cb640..ff61e2396 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -2296,6 +2296,9 @@ class CreateFunctionSqlDataAccess(Enum): @dataclass class CreateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + metastore_id: str """The unique ID of the metastore.""" @@ -2303,9 +2306,6 @@ class CreateMetastoreAssignment: """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2780,24 +2780,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) -@dataclass -class DeleteAliasResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: - """Deserializes the DeleteAliasResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialResponse: def as_dict(self) -> dict: @@ -10215,6 +10197,9 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: @dataclass class UpdateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + default_catalog_name: Optional[str] = None """The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" @@ -10222,9 +10207,6 @@ class UpdateMetastoreAssignment: metastore_id: Optional[str] = None """The unique ID of the metastore.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 57ea7e961..fd98e4e32 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -10,7 +10,7 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum _LOG = logging.getLogger("databricks.sdk") @@ -1080,7 +1080,7 @@ def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} if self.compliance_standards: - body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards] + body["compliance_standards"] = [v.value for v in self.compliance_standards] if self.is_enabled is not None: body["is_enabled"] = self.is_enabled return body @@ -1098,7 +1098,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" return cls( - compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard), + compliance_standards=_repeated_enum(d, "compliance_standards", settings.ComplianceStandard), is_enabled=d.get("is_enabled", None), ) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 20a212cc8..bba3e1880 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -8842,11 +8842,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: } op_response = self._api.do("POST", "/api/2.1/clusters/delete", 
body=body, headers=headers) - return Wait( - self.wait_get_cluster_terminated, - response=DeleteClusterResponse.from_dict(op_response), - cluster_id=cluster_id, - ) + return Wait(self.wait_get_cluster_terminated, cluster_id=cluster_id) def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.delete(cluster_id=cluster_id).result(timeout=timeout) @@ -9108,9 +9104,7 @@ def edit( } op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def edit_and_wait( self, @@ -9472,9 +9466,7 @@ def resize( } op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def resize_and_wait( self, @@ -9509,9 +9501,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai } op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def restart_and_wait( self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20) @@ -9578,9 +9568,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: } op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.start(cluster_id=cluster_id).result(timeout=timeout) @@ -9651,9 +9639,7 @@ def update( } op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) def update_and_wait( self, @@ -9834,7 +9820,6 @@ def cancel( op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers) return Wait( self.wait_command_status_command_execution_cancelled, - response=CancelResponse.from_dict(op_response), cluster_id=cluster_id, command_id=command_id, context_id=context_id, diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 2117a09f3..c63285120 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -50,24 +50,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: return cls() -@dataclass -class CreateDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateDirectoryResponse: - """Deserializes the 
CreateDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class CreateResponse: handle: Optional[int] = None @@ -94,24 +76,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: return cls(handle=d.get("handle", None)) -@dataclass -class DeleteDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDirectoryResponse: - """Deserializes the DeleteDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -289,24 +253,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FileInfo: ) -@dataclass -class GetDirectoryMetadataResponse: - def as_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetDirectoryMetadataResponse: - """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" - return cls() - - @dataclass class GetMetadataResponse: content_length: Optional[int] = None @@ -495,24 +441,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ReadResponse: return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None)) -@dataclass -class UploadResponse: - def as_dict(self) -> dict: - """Serializes the UploadResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UploadResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UploadResponse: - """Deserializes the UploadResponse from a dictionary.""" - return cls() - - class DbfsAPI: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index a470d7544..6627b7a87 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -500,24 +500,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ConsistencyToken: return cls(value=d.get("value", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteWorkspacePermissionAssignmentResponse: def as_dict(self) -> dict: @@ -1464,24 +1446,6 @@ class PatchOp(Enum): REPLACE = "replace" -@dataclass -class PatchResponse: - def as_dict(self) -> dict: - """Serializes the PatchResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - 
"""Serializes the PatchResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchResponse: - """Deserializes the PatchResponse from a dictionary.""" - return cls() - - class PatchSchema(Enum): URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp" diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 6c35188bc..fe11a479d 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -457,42 +457,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: ) -@dataclass -class CancelAllRunsResponse: - def as_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: - """Deserializes the CancelAllRunsResponse from a dictionary.""" - return cls() - - -@dataclass -class CancelRunResponse: - def as_dict(self) -> dict: - """Serializes the CancelRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse: - """Deserializes the CancelRunResponse from a dictionary.""" - return cls() - - class CleanRoomTaskRunLifeCycleState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. 
Using the original definition to remove coupling with jobs API definition""" @@ -1520,42 +1484,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtTask: ) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteRunResponse: - def as_dict(self) -> dict: - """Serializes the DeleteRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: - """Deserializes the DeleteRunResponse from a dictionary.""" - return cls() - - @dataclass class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Represents a change to the job cluster's settings that would be required for the job clusters to @@ -4307,24 +4235,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: return cls(repair_id=d.get("repair_id", None)) -@dataclass -class ResetResponse: - def as_dict(self) -> dict: - """Serializes the ResetResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResetResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResetResponse: - """Deserializes the ResetResponse from a dictionary.""" - return cls() - - @dataclass class ResolvedConditionTaskValues: left: Optional[str] = None @@ -8263,24 +8173,6 @@ class TriggerType(Enum): TABLE = "TABLE" -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - @dataclass class ViewItem: content: Optional[str] = None @@ -8552,11 +8444,7 @@ def cancel_run(self, run_id: int) -> Wait[Run]: } op_response = self._api.do("POST", "/api/2.2/jobs/runs/cancel", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=CancelRunResponse.from_dict(op_response), - run_id=run_id, - ) + return Wait(self.wait_get_run_job_terminated_or_skipped, run_id=run_id) def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run: return self.cancel_run(run_id=run_id).result(timeout=timeout) diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index d8d236711..ea4b8e489 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -194,24 +194,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePublishedAppIntegrationOutput: return cls() -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - 
"""Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class FederationPolicy: create_time: Optional[str] = None diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index d34c23448..9966856ec 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -3942,9 +3942,7 @@ def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]: } op_response = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/stop", headers=headers) - return Wait( - self.wait_get_pipeline_idle, response=StopPipelineResponse.from_dict(op_response), pipeline_id=pipeline_id - ) + return Wait(self.wait_get_pipeline_idle, pipeline_id=pipeline_id) def stop_and_wait(self, pipeline_id: str, timeout=timedelta(minutes=20)) -> GetPipelineResponse: return self.stop(pipeline_id=pipeline_id).result(timeout=timeout) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index f707aadf7..f1ffd2ff9 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -1034,24 +1034,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DataframeSplitInput: return cls(columns=d.get("columns", None), data=d.get("data", None), index=d.get("index", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class EmailNotifications: on_update_failure: Optional[List[str]] = None @@ -1139,15 +1121,15 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum): @dataclass class EndpointCoreConfigInput: + name: str + """The name of the serving endpoint to update. This field is required.""" + auto_capture_config: Optional[AutoCaptureConfigInput] = None """Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - name: Optional[str] = None - """The name of the serving endpoint to update. 
This field is required.""" - served_entities: Optional[List[ServedEntityInput]] = None """The list of served entities under the serving endpoint config.""" diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 3004f17da..a5f09f3af 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1694,24 +1694,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePersonalComputeSettingResponse: return cls(etag=d.get("etag", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteRestrictWorkspaceAdminsSettingResponse: """The etag is returned.""" @@ -4548,24 +4530,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: ) -@dataclass -class ReplaceResponse: - def as_dict(self) -> dict: - """Serializes the ReplaceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: - """Deserializes the ReplaceResponse from a dictionary.""" - return cls() - - @dataclass class RestrictWorkspaceAdminsMessage: status: RestrictWorkspaceAdminsMessageStatus @@ -4664,24 +4628,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenResponse: return cls() -@dataclass -class SetStatusResponse: - def as_dict(self) -> dict: - """Serializes the SetStatusResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: - """Deserializes the SetStatusResponse from a dictionary.""" - return cls() - - @dataclass class SlackConfig: channel_id: Optional[str] = None @@ -5210,24 +5156,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: ) -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - WorkspaceConf = Dict[str, str] diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 42f1f505c..ea9c97bc7 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -55,24 +55,6 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: 
- """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeltaSharingDependency: """Represents a UC dependency.""" @@ -3471,7 +3453,7 @@ def share_permissions( owner of the share. :param name: str - The name of the share. + The name of the Recipient. :param max_results: int (optional) Maximum number of permissions to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 0b4acc46e..5765fa21b 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1130,24 +1130,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseChunkInfo: ) -@dataclass -class CancelExecutionResponse: - def as_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelExecutionResponse: - """Deserializes the CancelExecutionResponse from a dictionary.""" - return cls() - - @dataclass class Channel: """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be @@ -8848,10 +8830,10 @@ def delete(self, id: str): def update( self, - id: str, *, created_at: Optional[str] = None, description: Optional[str] = None, + id: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[LegacyQuery] = None, @@ -8865,11 +8847,11 @@ def update( [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - :param id: str - The UUID for this visualization. :param created_at: str (optional) :param description: str (optional) A short description of this visualization. This is not displayed in the UI. + :param id: str (optional) + The UUID for this visualization. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. 
:param options: Any (optional) @@ -8887,6 +8869,8 @@ def update( body["created_at"] = created_at if description is not None: body["description"] = description + if id is not None: + body["id"] = id if name is not None: body["name"] = name if options is not None: @@ -9650,7 +9634,7 @@ def edit( } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/edit", body=body, headers=headers) - return Wait(self.wait_get_warehouse_running, response=EditWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_running, id=id) def edit_and_wait( self, @@ -9902,7 +9886,7 @@ def start(self, id: str) -> Wait[GetWarehouseResponse]: } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/start", headers=headers) - return Wait(self.wait_get_warehouse_running, response=StartWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_running, id=id) def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.start(id=id).result(timeout=timeout) @@ -9923,7 +9907,7 @@ def stop(self, id: str) -> Wait[GetWarehouseResponse]: } op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/stop", headers=headers) - return Wait(self.wait_get_warehouse_stopped, response=StopWarehouseResponse.from_dict(op_response), id=id) + return Wait(self.wait_get_warehouse_stopped, id=id) def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.stop(id=id).result(timeout=timeout) diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index d9a1cafca..f19a67754 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -227,24 +227,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: ) -@dataclass -class CreateScopeResponse: - def as_dict(self) -> dict: - """Serializes the CreateScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateScopeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateScopeResponse: - """Deserializes the CreateScopeResponse from a dictionary.""" - return cls() - - @dataclass class CredentialInfo: credential_id: int @@ -305,24 +287,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: ) -@dataclass -class DeleteAclResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAclResponse: - """Deserializes the DeleteAclResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialsResponse: def as_dict(self) -> dict: @@ -377,24 +341,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteScopeResponse: - def as_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: 
Dict[str, Any]) -> DeleteScopeResponse: - """Deserializes the DeleteScopeResponse from a dictionary.""" - return cls() - - @dataclass class DeleteSecretResponse: def as_dict(self) -> dict: @@ -999,42 +945,6 @@ class ObjectType(Enum): REPO = "REPO" -@dataclass -class PutAclResponse: - def as_dict(self) -> dict: - """Serializes the PutAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAclResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: - """Deserializes the PutAclResponse from a dictionary.""" - return cls() - - -@dataclass -class PutSecretResponse: - def as_dict(self) -> dict: - """Serializes the PutSecretResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutSecretResponse: - """Deserializes the PutSecretResponse from a dictionary.""" - return cls() - - @dataclass class RepoAccessControlRequest: group_name: Optional[str] = None
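To close out, a sketch of the caller-visible effect of dropping the empty *Response wrappers throughout this patch, assuming a configured WorkspaceClient; the scope, key, principal, and warehouse id below are hypothetical placeholders:

# Sketch only: scope, key, principal, and warehouse id are hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import AclPermission

w = WorkspaceClient()

# These calls previously deserialized empty CreateScopeResponse /
# PutSecretResponse / PutAclResponse bodies; they now simply return None.
w.secrets.create_scope(scope="demo-scope")
w.secrets.put_secret(scope="demo-scope", key="api-token", string_value="s3cr3t")
w.secrets.put_acl(scope="demo-scope", principal="data-team", permission=AclPermission.READ)

# Long-running operations are unchanged for callers: Wait no longer carries
# the discarded acknowledgement body, and result() still blocks on the polled
# terminal state.
warehouse = w.warehouses.start(id="1234567890abcdef").result()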