From 555a8b93c760c69870fa6dd6fcd8353d313b34d9 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Thu, 30 Oct 2025 11:45:11 +0000 Subject: [PATCH] Update SDK to latest API Definition --- .codegen/_openapi_sha | 2 +- .gitattributes | 1 + NEXT_CHANGELOG.md | 16 ++ databricks/sdk/__init__.py | 48 ++--- databricks/sdk/service/agentbricks.py | 2 + databricks/sdk/service/apps.py | 10 + databricks/sdk/service/billing.py | 16 +- databricks/sdk/service/catalog.py | 175 +++++++++++----- databricks/sdk/service/cleanrooms.py | 14 +- databricks/sdk/service/compute.py | 54 +++++ databricks/sdk/service/dashboards.py | 10 + databricks/sdk/service/database.py | 12 ++ databricks/sdk/service/dataquality.py | 4 + databricks/sdk/service/files.py | 79 +------- databricks/sdk/service/iam.py | 62 +++--- databricks/sdk/service/iamv2.py | 6 + databricks/sdk/service/jobs.py | 129 ++---------- databricks/sdk/service/marketplace.py | 18 ++ databricks/sdk/service/ml.py | 188 ++++++++++++++++-- databricks/sdk/service/oauth2.py | 28 +-- databricks/sdk/service/pipelines.py | 23 +++ databricks/sdk/service/provisioning.py | 9 + databricks/sdk/service/qualitymonitorv2.py | 2 + databricks/sdk/service/serving.py | 37 ++-- databricks/sdk/service/settings.py | 115 ++++------- databricks/sdk/service/settingsv2.py | 2 + databricks/sdk/service/sharing.py | 54 +++-- databricks/sdk/service/sql.py | 72 ++++--- databricks/sdk/service/tags.py | 2 + databricks/sdk/service/vectorsearch.py | 8 + databricks/sdk/service/workspace.py | 109 ++-------- docs/account/iam/workspace_assignment.rst | 8 +- docs/account/provisioning/credentials.rst | 6 +- docs/account/provisioning/storage.rst | 7 +- docs/dbdataclasses/apps.rst | 6 + docs/dbdataclasses/catalog.rst | 8 +- docs/dbdataclasses/compute.rst | 9 + docs/dbdataclasses/files.rst | 16 -- docs/dbdataclasses/iam.rst | 8 - docs/dbdataclasses/jobs.rst | 24 --- docs/dbdataclasses/ml.rst | 12 ++ docs/dbdataclasses/oauth2.rst | 4 - docs/dbdataclasses/serving.rst | 4 - 
docs/dbdataclasses/settings.rst | 16 -- docs/dbdataclasses/sharing.rst | 4 - docs/dbdataclasses/sql.rst | 15 +- docs/dbdataclasses/workspace.rst | 20 -- docs/workspace/catalog/catalogs.rst | 7 +- docs/workspace/catalog/connections.rst | 3 +- docs/workspace/catalog/credentials.rst | 4 + .../catalog/entity_tag_assignments.rst | 4 + docs/workspace/catalog/external_locations.rst | 30 ++- docs/workspace/catalog/functions.rst | 3 +- docs/workspace/catalog/grants.rst | 14 ++ docs/workspace/catalog/metastores.rst | 3 +- docs/workspace/catalog/model_versions.rst | 4 + docs/workspace/catalog/policies.rst | 4 + docs/workspace/catalog/registered_models.rst | 4 + docs/workspace/catalog/resource_quotas.rst | 4 + docs/workspace/catalog/schemas.rst | 3 +- .../workspace/catalog/storage_credentials.rst | 18 +- docs/workspace/catalog/system_schemas.rst | 3 +- docs/workspace/catalog/tables.rst | 7 +- docs/workspace/catalog/volumes.rst | 4 + docs/workspace/catalog/workspace_bindings.rst | 7 + docs/workspace/compute/clusters.rst | 3 +- docs/workspace/files/files.rst | 4 +- docs/workspace/iam/current_user.rst | 2 +- docs/workspace/iam/permissions.rst | 2 +- docs/workspace/jobs/jobs.rst | 16 +- docs/workspace/ml/model_registry.rst | 13 +- docs/workspace/pipelines/pipelines.rst | 8 +- docs/workspace/sharing/providers.rst | 17 +- docs/workspace/sharing/recipients.rst | 4 +- docs/workspace/sharing/shares.rst | 23 ++- docs/workspace/sql/queries.rst | 2 +- .../sql/query_visualizations_legacy.rst | 6 +- docs/workspace/workspace/workspace.rst | 4 +- tests/databricks/sdk/service/httpcallv2.py | 2 + .../sdk/service/idempotencytesting.py | 3 + tests/databricks/sdk/service/lrotesting.py | 1 + tests/generated/test_idempotency.py | 134 +++++++++++++ 82 files changed, 1067 insertions(+), 777 deletions(-) create mode 100755 tests/generated/test_idempotency.py diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index aa1180c39..b7f56b6d8 100644 --- a/.codegen/_openapi_sha +++ 
b/.codegen/_openapi_sha @@ -1 +1 @@ -b54bbd860200d735fa2c306ec1559090625370e6 \ No newline at end of file +e2018bb00cba203508f8afe5a6d41bd49789ba25 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 8bee41bad..28ea4a477 100755 --- a/.gitattributes +++ b/.gitattributes @@ -33,5 +33,6 @@ databricks/sdk/service/tags.py linguist-generated=true databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true test_http_call.py linguist-generated=true +test_idempotency.py linguist-generated=true test_json_marshall.py linguist-generated=true test_lro_call.py linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index efa358167..7b0088e91 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,19 @@ ### Internal Changes ### API Changes +* Add `instance_profile_arn` field for `databricks.sdk.service.compute.InstancePoolAwsAttributes`. +* Add `continuous`, `sliding` and `tumbling` fields for `databricks.sdk.service.ml.TimeWindow`. +* Add `usage_policy_id` field for `databricks.sdk.service.pipelines.CreatePipeline`. +* Add `usage_policy_id` field for `databricks.sdk.service.pipelines.EditPipeline`. +* Add `usage_policy_id` field for `databricks.sdk.service.pipelines.PipelineSpec`. +* Add `read_files_bytes` field for `databricks.sdk.service.sql.QueryMetrics`. +* Add `select` enum value for `databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurablePermission`. +* Add `table` enum value for `databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurableType`. +* Add `decommission_started` and `decommission_ended` enum values for `databricks.sdk.service.compute.EventType`. +* Add `dbr_image_resolution_failure` enum value for `databricks.sdk.service.compute.TerminationReasonCode`. +* Add `dbr_image_resolution_failure` enum value for `databricks.sdk.service.sql.TerminationReasonCode`. 
+* [Breaking] Change `offline_store_config` and `online_store_config` fields for `databricks.sdk.service.ml.MaterializedFeature` to no longer be required. +* Change `offline_store_config` and `online_store_config` fields for `databricks.sdk.service.ml.MaterializedFeature` to no longer be required. +* [Breaking] Change `lifecycle_state` field for `databricks.sdk.service.sql.AlertV2` to type `databricks.sdk.service.sql.AlertLifecycleState` dataclass. +* [Breaking] Remove `table` field for `databricks.sdk.service.jobs.TriggerSettings`. +* [Breaking] Remove `duration` and `offset` fields for `databricks.sdk.service.ml.TimeWindow`. \ No newline at end of file diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index dd8b7f796..0d285ccda 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -293,6 +293,7 @@ def __init__( self._feature_engineering = pkg_ml.FeatureEngineeringAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) + self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._functions = pkg_catalog.FunctionsAPI(self._api_client) self._genie = pkg_dashboards.GenieAPI(self._api_client) self._git_credentials = pkg_workspace.GitCredentialsAPI(self._api_client) @@ -375,9 +376,8 @@ def __init__( self._workspace = WorkspaceExt(self._api_client) self._workspace_bindings = pkg_catalog.WorkspaceBindingsAPI(self._api_client) self._workspace_conf = pkg_settings.WorkspaceConfAPI(self._api_client) - self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) - self._forecasting = pkg_ml.ForecastingAPI(self._api_client) self._workspace_iam_v2 = pkg_iamv2.WorkspaceIamV2API(self._api_client) + self._workspace_settings_v2 = pkg_settingsv2.WorkspaceSettingsV2API(self._api_client) self._groups = pkg_iam.GroupsAPI(self._api_client) self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._users 
= pkg_iam.UsersAPI(self._api_client) @@ -599,6 +599,11 @@ def feature_store(self) -> pkg_ml.FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features.""" return self._feature_store + @property + def forecasting(self) -> pkg_ml.ForecastingAPI: + """The Forecasting API allows you to create and get serverless forecasting experiments.""" + return self._forecasting + @property def functions(self) -> pkg_catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" @@ -974,21 +979,16 @@ def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf - @property - def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: - """APIs to manage workspace level settings.""" - return self._workspace_settings_v2 - - @property - def forecasting(self) -> pkg_ml.ForecastingAPI: - """The Forecasting API allows you to create and get serverless forecasting experiments.""" - return self._forecasting - @property def workspace_iam_v2(self) -> pkg_iamv2.WorkspaceIamV2API: """These APIs are used to manage identities and the workspace access of these identities in .""" return self._workspace_iam_v2 + @property + def workspace_settings_v2(self) -> pkg_settingsv2.WorkspaceSettingsV2API: + """APIs to manage workspace level settings.""" + return self._workspace_settings_v2 + @property def groups(self) -> pkg_iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" @@ -1086,11 +1086,13 @@ def __init__( self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client) self._billable_usage = pkg_billing.BillableUsageAPI(self._api_client) self._budget_policy = pkg_billing.BudgetPolicyAPI(self._api_client) + self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._credentials = 
pkg_provisioning.CredentialsAPI(self._api_client) self._custom_app_integration = pkg_oauth2.CustomAppIntegrationAPI(self._api_client) self._encryption_keys = pkg_provisioning.EncryptionKeysAPI(self._api_client) self._federation_policy = pkg_oauth2.AccountFederationPolicyAPI(self._api_client) self._groups_v2 = pkg_iam.AccountGroupsV2API(self._api_client) + self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) self._ip_access_lists = pkg_settings.AccountIpAccessListsAPI(self._api_client) self._log_delivery = pkg_billing.LogDeliveryAPI(self._api_client) self._metastore_assignments = pkg_catalog.AccountMetastoreAssignmentsAPI(self._api_client) @@ -1114,8 +1116,6 @@ def __init__( self._workspace_assignment = pkg_iam.WorkspaceAssignmentAPI(self._api_client) self._workspace_network_configuration = pkg_settings.WorkspaceNetworkConfigurationAPI(self._api_client) self._workspaces = pkg_provisioning.WorkspacesAPI(self._api_client) - self._iam_v2 = pkg_iamv2.AccountIamV2API(self._api_client) - self._budgets = pkg_billing.BudgetsAPI(self._api_client) self._groups = pkg_iam.AccountGroupsAPI(self._api_client) self._service_principals = pkg_iam.AccountServicePrincipalsAPI(self._api_client) self._users = pkg_iam.AccountUsersAPI(self._api_client) @@ -1143,6 +1143,11 @@ def budget_policy(self) -> pkg_billing.BudgetPolicyAPI: """A service serves REST API about Budget policies.""" return self._budget_policy + @property + def budgets(self) -> pkg_billing.BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + @property def credentials(self) -> pkg_provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -1168,6 +1173,11 @@ def groups_v2(self) -> pkg_iam.AccountGroupsV2API: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups_v2 + @property + def iam_v2(self) -> pkg_iamv2.AccountIamV2API: + 
"""These APIs are used to manage identities and the workspace access of these identities in .""" + return self._iam_v2 + @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.""" @@ -1283,16 +1293,6 @@ def workspaces(self) -> pkg_provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces - @property - def iam_v2(self) -> pkg_iamv2.AccountIamV2API: - """These APIs are used to manage identities and the workspace access of these identities in .""" - return self._iam_v2 - - @property - def budgets(self) -> pkg_billing.BudgetsAPI: - """These APIs manage budget configurations for this account.""" - return self._budgets - @property def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" diff --git a/databricks/sdk/service/agentbricks.py b/databricks/sdk/service/agentbricks.py index b2a560e31..b5242a314 100755 --- a/databricks/sdk/service/agentbricks.py +++ b/databricks/sdk/service/agentbricks.py @@ -238,6 +238,7 @@ def create_custom_llm( :returns: :class:`CustomLlm` """ + body = {} if agent_artifact_path is not None: body["agent_artifact_path"] = agent_artifact_path @@ -328,6 +329,7 @@ def update_custom_llm(self, id: str, custom_llm: CustomLlm, update_mask: str) -> :returns: :class:`CustomLlm` """ + body = {} if custom_llm is not None: body["custom_llm"] = custom_llm.as_dict() diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index bbe13675a..f22edec9b 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -790,11 +790,13 @@ class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum): MANAGE = "MANAGE" READ_VOLUME = "READ_VOLUME" + SELECT = "SELECT" WRITE_VOLUME = "WRITE_VOLUME" class 
AppManifestAppResourceUcSecurableSpecUcSecurableType(Enum): + TABLE = "TABLE" VOLUME = "VOLUME" @@ -1867,6 +1869,7 @@ def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. """ + body = app.as_dict() query = {} if no_compute is not None: @@ -1903,6 +1906,7 @@ def create_update(self, app_name: str, update_mask: str, *, app: Optional[App] = Long-running operation waiter for :class:`AppUpdate`. See :method:wait_get_update_app_succeeded for more details. """ + body = {} if app is not None: body["app"] = app.as_dict() @@ -1949,6 +1953,7 @@ def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeploy Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. """ + body = app_deployment.as_dict() headers = { "Accept": "application/json", @@ -2124,6 +2129,7 @@ def set_permissions( :returns: :class:`AppPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -2189,6 +2195,7 @@ def update(self, name: str, app: App) -> App: :returns: :class:`App` """ + body = app.as_dict() headers = { "Accept": "application/json", @@ -2209,6 +2216,7 @@ def update_permissions( :returns: :class:`AppPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -2234,6 +2242,7 @@ def create_custom_template(self, template: CustomTemplate) -> CustomTemplate: :returns: :class:`CustomTemplate` """ + body = template.as_dict() headers = { "Accept": "application/json", @@ -2316,6 +2325,7 @@ def update_custom_template(self, name: str, template: CustomTemplate) -> CustomT :returns: :class:`CustomTemplate` """ + body = template.as_dict() headers = { "Accept": "application/json", diff --git a/databricks/sdk/service/billing.py 
b/databricks/sdk/service/billing.py index 3758028c2..77ef2f792 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +import uuid from dataclasses import dataclass from enum import Enum from typing import Any, BinaryIO, Dict, Iterator, List, Optional @@ -1067,9 +1068,6 @@ class LogDeliveryConfiguration: [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - account_id: str - """Databricks account ID.""" - credentials_id: str """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page. See [Configure billable @@ -1083,6 +1081,9 @@ class LogDeliveryConfiguration: [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" + account_id: Optional[str] = None + """Databricks account ID.""" + config_id: Optional[str] = None """The unique UUID of log delivery configuration""" @@ -1565,6 +1566,9 @@ def create(self, *, policy: Optional[BudgetPolicy] = None, request_id: Optional[ :returns: :class:`BudgetPolicy` """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) body = {} if policy is not None: body["policy"] = policy.as_dict() @@ -1679,6 +1683,7 @@ def update( :returns: :class:`BudgetPolicy` """ + body = policy.as_dict() query = {} if limit_config is not None: @@ -1715,6 +1720,7 @@ def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigu :returns: :class:`CreateBudgetConfigurationResponse` """ + body = {} if budget is not None: body["budget"] = budget.as_dict() @@ -1797,6 +1803,7 @@ def update(self, budget_id: str, budget: UpdateBudgetConfigurationBudget) -> Upd :returns: 
:class:`UpdateBudgetConfigurationResponse` """ + body = {} if budget is not None: body["budget"] = budget.as_dict() @@ -1895,6 +1902,7 @@ def create( :returns: :class:`WrappedLogDeliveryConfiguration` """ + body = {} if log_delivery_configuration is not None: body["log_delivery_configuration"] = log_delivery_configuration.as_dict() @@ -1989,6 +1997,7 @@ def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryCo """ + body = {} if status is not None: body["status"] = status.value @@ -2026,6 +2035,7 @@ def create( :returns: :class:`CreateBillingUsageDashboardResponse` """ + body = {} if dashboard_type is not None: body["dashboard_type"] = dashboard_type.value diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 182f8cf4e..51ce52a40 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1260,7 +1260,7 @@ class CloudflareApiToken: secret_access_key: str """The secret access token generated for the above access key ID.""" - account_id: str + account_id: Optional[str] = None """The ID of the account associated with the API token.""" def as_dict(self) -> dict: @@ -1740,7 +1740,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 46""" + """Next Id: 47""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -2241,6 +2241,9 @@ class CreateFunctionSqlDataAccess(Enum): @dataclass class CreateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + metastore_id: str """The unique ID of the metastore.""" @@ -2248,9 +2251,6 @@ class CreateMetastoreAssignment: """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2726,24 +2726,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) -@dataclass -class DeleteAliasResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: - """Deserializes the DeleteAliasResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialResponse: def as_dict(self) -> dict: @@ -8550,7 +8532,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RowFilterOptions: @dataclass class SchemaInfo: - """Next ID: 42""" + """Next ID: 43""" browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object @@ -8763,7 +8745,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable: class SecurableKind(Enum): - """Latest kind: CONNECTION_SALESFORCE_OAUTH_MTLS = 268; Next id:269""" + """Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271""" TABLE_DB_STORAGE = "TABLE_DB_STORAGE" TABLE_DELTA = "TABLE_DELTA" @@ -10071,6 +10053,9 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: @dataclass class UpdateMetastoreAssignment: + workspace_id: int + """A workspace ID.""" + default_catalog_name: Optional[str] = None """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" @@ -10078,9 +10063,6 @@ class UpdateMetastoreAssignment: metastore_id: Optional[str] = None """The unique ID of the metastore.""" - workspace_id: Optional[int] = None - """A workspace ID.""" - def as_dict(self) -> dict: """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -10601,6 +10583,7 @@ def create( :returns: :class:`AccountsCreateMetastoreAssignmentResponse` """ + body = {} if metastore_assignment is not None: body["metastore_assignment"] = metastore_assignment.as_dict() @@ -10692,6 +10675,7 @@ def update( :returns: :class:`AccountsUpdateMetastoreAssignmentResponse` """ + body = {} if metastore_assignment is not None: body["metastore_assignment"] = metastore_assignment.as_dict() @@ -10723,6 +10707,7 @@ def create(self, *, metastore_info: Optional[CreateAccountsMetastore] = None) -> :returns: :class:`AccountsCreateMetastoreResponse` """ + body = {} if metastore_info is not None: body["metastore_info"] = metastore_info.as_dict() @@ -10805,6 +10790,7 @@ def update( :returns: :class:`AccountsUpdateMetastoreResponse` """ + body = {} if metastore_info is not None: body["metastore_info"] = metastore_info.as_dict() @@ -10848,6 +10834,7 @@ def create( :returns: :class:`AccountsCreateStorageCredentialInfo` """ + body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() @@ -10962,6 +10949,7 @@ def update( :returns: :class:`AccountsUpdateStorageCredentialResponse` """ + body = {} if credential_info is not None: body["credential_info"] = credential_info.as_dict() @@ -11031,6 +11019,7 @@ def update( :returns: :class:`ArtifactAllowlistInfo` """ + body = {} if artifact_matchers is not None: body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers] @@ -11098,6 +11087,7 @@ def create( :returns: :class:`CatalogInfo` """ + body = {} if comment is not None: 
body["comment"] = comment @@ -11185,8 +11175,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective @@ -11264,6 +11253,7 @@ def update( :returns: :class:`CatalogInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -11331,6 +11321,7 @@ def create( :returns: :class:`ConnectionInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -11391,8 +11382,7 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param max_results: int (optional) Maximum number of connections to return. 
- If not set, all connections are returned (not @@ -11439,6 +11429,7 @@ def update( :returns: :class:`ConnectionInfo` """ + body = {} if new_name is not None: body["new_name"] = new_name @@ -11509,6 +11500,7 @@ def create_credential( :returns: :class:`CredentialInfo` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -11575,6 +11567,7 @@ def generate_temporary_service_credential( :returns: :class:`TemporaryCredentials` """ + body = {} if azure_options is not None: body["azure_options"] = azure_options.as_dict() @@ -11621,6 +11614,10 @@ def list_credentials( is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param include_unbound: bool (optional) Whether to include credentials not bound to the workspace. Effective only if the user has permission to update the credential–workspace binding. 
@@ -11709,6 +11706,7 @@ def update_credential( :returns: :class:`CredentialInfo` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -11784,6 +11782,7 @@ def validate_credential( :returns: :class:`ValidateCredentialResponse` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -11835,6 +11834,7 @@ def create(self, tag_assignment: EntityTagAssignment) -> EntityTagAssignment: :returns: :class:`EntityTagAssignment` """ + body = tag_assignment.as_dict() headers = { "Accept": "application/json", @@ -11907,6 +11907,10 @@ def list( ) -> Iterator[EntityTagAssignment]: """List tag assignments for an Unity Catalog entity + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param entity_type: str The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, columns, volumes. @@ -11978,6 +11982,7 @@ def update( :returns: :class:`EntityTagAssignment` """ + body = tag_assignment.as_dict() query = {} if update_mask is not None: @@ -12018,6 +12023,7 @@ def create_external_lineage_relationship( :returns: :class:`ExternalLineageRelationship` """ + body = external_lineage_relationship.as_dict() headers = { "Accept": "application/json", @@ -12113,6 +12119,7 @@ def update_external_lineage_relationship( :returns: :class:`ExternalLineageRelationship` """ + body = external_lineage_relationship.as_dict() query = {} if update_mask is not None: @@ -12186,6 +12193,7 @@ def create( :returns: :class:`ExternalLocationInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -12276,8 +12284,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. 
Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access @@ -12372,6 +12379,7 @@ def update( :returns: :class:`ExternalLocationInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -12428,6 +12436,7 @@ def create_external_metadata(self, external_metadata: ExternalMetadata) -> Exter :returns: :class:`ExternalMetadata` """ + body = external_metadata.as_dict() headers = { "Accept": "application/json", @@ -12527,6 +12536,7 @@ def update_external_metadata( :returns: :class:`ExternalMetadata` """ + body = external_metadata.as_dict() query = {} if update_mask is not None: @@ -12566,6 +12576,7 @@ def create(self, function_info: CreateFunction) -> FunctionInfo: :returns: :class:`FunctionInfo` """ + body = {} if function_info is not None: body["function_info"] = function_info.as_dict() @@ -12648,8 +12659,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Name of parent catalog for functions of interest. @@ -12709,6 +12719,7 @@ def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: :returns: :class:`FunctionInfo` """ + body = {} if owner is not None: body["owner"] = owner @@ -12746,6 +12757,13 @@ def get( ) -> GetPermissionsResponse: """Gets the permissions for a securable. 
Does not include inherited permissions. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. + + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. + :param securable_type: str Type of securable. :param full_name: str @@ -12796,6 +12814,13 @@ def get_effective( """Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. + + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. + :param securable_type: str Type of securable. :param full_name: str @@ -12853,6 +12878,7 @@ def update( :returns: :class:`UpdatePermissionsResponse` """ + body = {} if changes is not None: body["changes"] = [v.as_dict() for v in changes] @@ -12898,6 +12924,7 @@ def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str """ + body = {} if default_catalog_name is not None: body["default_catalog_name"] = default_catalog_name @@ -12925,6 +12952,7 @@ def create(self, name: str, *, region: Optional[str] = None, storage_root: Optio :returns: :class:`MetastoreInfo` """ + body = {} if name is not None: body["name"] = name @@ -13000,8 +13028,7 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. 
Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param max_results: int (optional) Maximum number of metastores to return. - when set to a value greater than 0, the page length is the @@ -13107,6 +13134,7 @@ def update( :returns: :class:`MetastoreInfo` """ + body = {} if delta_sharing_organization_name is not None: body["delta_sharing_organization_name"] = delta_sharing_organization_name @@ -13150,6 +13178,7 @@ def update_assignment( """ + body = {} if default_catalog_name is not None: body["default_catalog_name"] = default_catalog_name @@ -13284,6 +13313,10 @@ def list( There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) @@ -13397,6 +13430,7 @@ def update( :returns: :class:`ModelVersionInfo` """ + body = {} if aliases is not None: body["aliases"] = [v.as_dict() for v in aliases] @@ -13488,6 +13522,7 @@ def create(self, table: OnlineTable) -> Wait[OnlineTable]: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. 
""" + body = table.as_dict() headers = { "Accept": "application/json", @@ -13555,6 +13590,7 @@ def create_policy(self, policy_info: PolicyInfo) -> PolicyInfo: :returns: :class:`PolicyInfo` """ + body = policy_info.as_dict() headers = { "Accept": "application/json", @@ -13624,6 +13660,10 @@ def list_policies( """List all policies defined on a securable. Optionally, the list can include inherited policies defined on the securable's parent schema or catalog. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param on_securable_type: str Required. The type of the securable to list policies for. :param on_securable_fullname: str @@ -13696,6 +13736,7 @@ def update_policy( :returns: :class:`PolicyInfo` """ + body = policy_info.as_dict() query = {} if update_mask is not None: @@ -13815,6 +13856,7 @@ def create( :returns: :class:`MonitorInfo` """ + body = {} if assets_dir is not None: body["assets_dir"] = assets_dir @@ -13978,6 +14020,7 @@ def regenerate_dashboard( :returns: :class:`RegenerateDashboardResponse` """ + body = {} if warehouse_id is not None: body["warehouse_id"] = warehouse_id @@ -14082,6 +14125,7 @@ def update( :returns: :class:`MonitorInfo` """ + body = {} if baseline_table_name is not None: body["baseline_table_name"] = baseline_table_name @@ -14205,6 +14249,7 @@ def create( :returns: :class:`RegisteredModelInfo` """ + body = {} if aliases is not None: body["aliases"] = [v.as_dict() for v in aliases] @@ -14330,6 +14375,10 @@ def list( There is no guarantee of a specific ordering of the elements in the response. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. 
Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. @@ -14399,6 +14448,7 @@ def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredM :returns: :class:`RegisteredModelAlias` """ + body = {} if version_num is not None: body["version_num"] = version_num @@ -14473,6 +14523,7 @@ def update( :returns: :class:`RegisteredModelInfo` """ + body = {} if aliases is not None: body["aliases"] = [v.as_dict() for v in aliases] @@ -14555,6 +14606,10 @@ def list_quotas( """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) @@ -14613,6 +14668,7 @@ def batch_create_access_requests( :returns: :class:`BatchCreateAccessRequestsResponse` """ + body = {} if requests is not None: body["requests"] = [v.as_dict() for v in requests] @@ -14677,6 +14733,7 @@ def update_access_request_destinations( :returns: :class:`AccessRequestDestinations` """ + body = access_request_destinations.as_dict() query = {} if update_mask is not None: @@ -14724,6 +14781,7 @@ def create( :returns: :class:`SchemaInfo` """ + body = {} if catalog_name is not None: body["catalog_name"] = catalog_name @@ -14805,8 +14863,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. 
Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Parent catalog for schemas of interest. @@ -14876,6 +14933,7 @@ def update( :returns: :class:`SchemaInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -14952,6 +15010,7 @@ def create( :returns: :class:`StorageCredentialInfo` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -15035,8 +15094,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param include_unbound: bool (optional) Whether to include credentials not bound to the workspace. Effective only if the user has permission @@ -15125,6 +15183,7 @@ def update( :returns: :class:`StorageCredentialInfo` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -15202,6 +15261,7 @@ def validate( :returns: :class:`ValidateStorageCredentialResponse` """ + body = {} if aws_iam_role is not None: body["aws_iam_role"] = aws_iam_role.as_dict() @@ -15270,6 +15330,7 @@ def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[ """ + body = {} if catalog_name is not None: body["catalog_name"] = catalog_name @@ -15296,8 +15357,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. 
Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param metastore_id: str The ID for the metastore in which the system schema resides. @@ -15364,6 +15424,7 @@ def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstr :returns: :class:`TableConstraint` """ + body = {} if constraint is not None: body["constraint"] = constraint.as_dict() @@ -15471,6 +15532,7 @@ def create( :returns: :class:`TableInfo` """ + body = {} if catalog_name is not None: body["catalog_name"] = catalog_name @@ -15600,8 +15662,7 @@ def list( PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Name of parent catalog for tables of interest. @@ -15683,6 +15744,10 @@ def list_summaries( There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str Name of parent catalog for tables of interest. 
:param include_manifest_capabilities: bool (optional) @@ -15742,6 +15807,7 @@ def update(self, full_name: str, *, owner: Optional[str] = None): """ + body = {} if owner is not None: body["owner"] = owner @@ -15803,6 +15869,7 @@ def generate_temporary_path_credentials( :returns: :class:`GenerateTemporaryPathCredentialResponse` """ + body = {} if dry_run is not None: body["dry_run"] = dry_run @@ -15853,6 +15920,7 @@ def generate_temporary_table_credentials( :returns: :class:`GenerateTemporaryTableCredentialResponse` """ + body = {} if operation is not None: body["operation"] = operation.value @@ -15923,6 +15991,7 @@ def create( :returns: :class:`VolumeInfo` """ + body = {} if catalog_name is not None: body["catalog_name"] = catalog_name @@ -15980,6 +16049,10 @@ def list( There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str The identifier of the catalog :param schema_name: str @@ -16078,6 +16151,7 @@ def update( :returns: :class:`VolumeInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -16141,6 +16215,13 @@ def get_bindings( """Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. + + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. 
+ :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -16199,6 +16280,7 @@ def update( :returns: :class:`UpdateCatalogWorkspaceBindingsResponse` """ + body = {} if assign_workspaces is not None: body["assign_workspaces"] = [v for v in assign_workspaces] @@ -16239,6 +16321,7 @@ def update_bindings( :returns: :class:`UpdateWorkspaceBindingsResponse` """ + body = {} if add is not None: body["add"] = [v.as_dict() for v in add] diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 299d623e3..68f5f5712 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -12,7 +12,7 @@ from databricks.sdk.service import catalog, jobs, settings, sharing from databricks.sdk.service._internal import (Wait, _enum, _from_dict, - _repeated_dict) + _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") @@ -1080,7 +1080,7 @@ def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} if self.compliance_standards: - body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards] + body["compliance_standards"] = [v.value for v in self.compliance_standards] if self.is_enabled is not None: body["is_enabled"] = self.is_enabled return body @@ -1098,7 +1098,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" return cls( - compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard), + compliance_standards=_repeated_enum(d, "compliance_standards", settings.ComplianceStandard), is_enabled=d.get("is_enabled", None), ) @@ -1495,6 +1495,7 @@ def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: :returns: :class:`CleanRoomAsset` """ + body 
= asset.as_dict() headers = { "Accept": "application/json", @@ -1524,6 +1525,7 @@ def create_clean_room_asset_review( :returns: :class:`CreateCleanRoomAssetReviewResponse` """ + body = {} if notebook_review is not None: body["notebook_review"] = notebook_review.as_dict() @@ -1635,6 +1637,7 @@ def update( :returns: :class:`CleanRoomAsset` """ + body = asset.as_dict() headers = { "Accept": "application/json", @@ -1666,6 +1669,7 @@ def create(self, clean_room_name: str, auto_approval_rule: CleanRoomAutoApproval :returns: :class:`CleanRoomAutoApprovalRule` """ + body = {} if auto_approval_rule is not None: body["auto_approval_rule"] = auto_approval_rule.as_dict() @@ -1760,6 +1764,7 @@ def update( :returns: :class:`CleanRoomAutoApprovalRule` """ + body = auto_approval_rule.as_dict() headers = { "Accept": "application/json", @@ -1869,6 +1874,7 @@ def create(self, clean_room: CleanRoom) -> Wait[CleanRoom]: Long-running operation waiter for :class:`CleanRoom`. See :method:wait_get_clean_room_active for more details. 
""" + body = clean_room.as_dict() headers = { "Accept": "application/json", @@ -1894,6 +1900,7 @@ def create_output_catalog( :returns: :class:`CreateCleanRoomOutputCatalogResponse` """ + body = output_catalog.as_dict() headers = { "Accept": "application/json", @@ -1979,6 +1986,7 @@ def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanR :returns: :class:`CleanRoom` """ + body = {} if clean_room is not None: body["clean_room"] = clean_room.as_dict() diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index c6f9bd35d..49f3c3b05 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -3377,6 +3377,8 @@ class EventType(Enum): CLUSTER_MIGRATED = "CLUSTER_MIGRATED" CREATING = "CREATING" DBFS_DOWN = "DBFS_DOWN" + DECOMMISSION_ENDED = "DECOMMISSION_ENDED" + DECOMMISSION_STARTED = "DECOMMISSION_STARTED" DID_NOT_EXPAND_DISK = "DID_NOT_EXPAND_DISK" DRIVER_HEALTHY = "DRIVER_HEALTHY" DRIVER_NOT_RESPONDING = "DRIVER_NOT_RESPONDING" @@ -4823,6 +4825,16 @@ class InstancePoolAwsAttributes: availability: Optional[InstancePoolAwsAttributesAvailability] = None """Availability type used for the spot nodes.""" + instance_profile_arn: Optional[str] = None + """All AWS instances belonging to the instance pool will have this instance profile. If omitted, + instances will initially be launched with the workspace's default instance profile. If defined, + clusters that use the pool will inherit the instance profile, and must not specify their own + instance profile on cluster creation or update. If the pool does not specify an instance + profile, clusters using the pool may specify any instance profile. The instance profile must + have previously been added to the Databricks environment by an account administrator. 
+ + This feature may only be available to certain customer plans.""" + spot_bid_price_percent: Optional[int] = None """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -4845,6 +4857,8 @@ def as_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability.value + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -4856,6 +4870,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.availability is not None: body["availability"] = self.availability + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn if self.spot_bid_price_percent is not None: body["spot_bid_price_percent"] = self.spot_bid_price_percent if self.zone_id is not None: @@ -4867,6 +4883,7 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" return cls( availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability), + instance_profile_arn=d.get("instance_profile_arn", None), spot_bid_price_percent=d.get("spot_bid_price_percent", None), zone_id=d.get("zone_id", None), ) @@ -7087,6 +7104,7 @@ class TerminationReasonCode(Enum): DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DBR_IMAGE_RESOLUTION_FAILURE = "DBR_IMAGE_RESOLUTION_FAILURE" DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" @@ -7783,6 +7801,7 @@ def create( :returns: 
:class:`CreatePolicyResponse` """ + body = {} if definition is not None: body["definition"] = definition @@ -7814,6 +7833,7 @@ def delete(self, policy_id: str): """ + body = {} if policy_id is not None: body["policy_id"] = policy_id @@ -7873,6 +7893,7 @@ def edit( """ + body = {} if definition is not None: body["definition"] = definition @@ -7991,6 +8012,7 @@ def set_permissions( :returns: :class:`ClusterPolicyPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8016,6 +8038,7 @@ def update_permissions( :returns: :class:`ClusterPolicyPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8133,6 +8156,7 @@ def change_owner(self, cluster_id: str, owner_username: str): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -8332,6 +8356,7 @@ def create( Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ + body = {} if apply_policy_default_values is not None: body["apply_policy_default_values"] = apply_policy_default_values @@ -8497,6 +8522,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_terminated for more details. """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -8695,6 +8721,7 @@ def edit( Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
""" + body = {} if apply_policy_default_values is not None: body["apply_policy_default_values"] = apply_policy_default_values @@ -8891,6 +8918,7 @@ def events( :returns: Iterator over :class:`ClusterEvent` """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9064,6 +9092,7 @@ def permanent_delete(self, cluster_id: str): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9082,6 +9111,7 @@ def pin(self, cluster_id: str): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9117,6 +9147,7 @@ def resize( Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ + body = {} if autoscale is not None: body["autoscale"] = autoscale.as_dict() @@ -9154,6 +9185,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9184,6 +9216,7 @@ def set_permissions( :returns: :class:`ClusterPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9223,6 +9256,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9246,6 +9280,7 @@ def unpin(self, cluster_id: str): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -9290,6 +9325,7 @@ def update( Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
""" + body = {} if cluster is not None: body["cluster"] = cluster.as_dict() @@ -9326,6 +9362,7 @@ def update_permissions( :returns: :class:`ClusterPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9469,6 +9506,7 @@ def cancel( Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_cancelled for more details. """ + body = {} if cluster_id is not None: body["clusterId"] = cluster_id @@ -9561,6 +9599,7 @@ def create( Long-running operation waiter for :class:`ContextStatusResponse`. See :method:wait_context_status_command_execution_running for more details. """ + body = {} if cluster_id is not None: body["clusterId"] = cluster_id @@ -9592,6 +9631,7 @@ def destroy(self, cluster_id: str, context_id: str): """ + body = {} if cluster_id is not None: body["clusterId"] = cluster_id @@ -9628,6 +9668,7 @@ def execute( Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_finished_or_error for more details. 
""" + body = {} if cluster_id is not None: body["clusterId"] = cluster_id @@ -9701,6 +9742,7 @@ def create( :returns: :class:`CreateResponse` """ + body = {} if enabled is not None: body["enabled"] = enabled @@ -9793,6 +9835,7 @@ def update( """ + body = {} if enabled is not None: body["enabled"] = enabled @@ -9905,6 +9948,7 @@ def create( :returns: :class:`CreateInstancePoolResponse` """ + body = {} if aws_attributes is not None: body["aws_attributes"] = aws_attributes.as_dict() @@ -9952,6 +9996,7 @@ def delete(self, instance_pool_id: str): """ + body = {} if instance_pool_id is not None: body["instance_pool_id"] = instance_pool_id @@ -10013,6 +10058,7 @@ def edit( """ + body = {} if custom_tags is not None: body["custom_tags"] = custom_tags @@ -10120,6 +10166,7 @@ def set_permissions( :returns: :class:`InstancePoolPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -10143,6 +10190,7 @@ def update_permissions( :returns: :class:`InstancePoolPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -10205,6 +10253,7 @@ def add( """ + body = {} if iam_role_arn is not None: body["iam_role_arn"] = iam_role_arn @@ -10259,6 +10308,7 @@ def edit( """ + body = {} if iam_role_arn is not None: body["iam_role_arn"] = iam_role_arn @@ -10301,6 +10351,7 @@ def remove(self, instance_profile_arn: str): """ + body = {} if instance_profile_arn is not None: body["instance_profile_arn"] = instance_profile_arn @@ -10381,6 +10432,7 @@ def install(self, cluster_id: str, libraries: List[Library]): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -10404,6 +10456,7 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id @@ -10453,6 +10506,7 @@ def enforce_compliance( :returns: 
:class:`EnforceClusterComplianceResponse` """ + body = {} if cluster_id is not None: body["cluster_id"] = cluster_id diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 69b544f7d..b94b2a089 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1756,6 +1756,7 @@ def create_message(self, space_id: str, conversation_id: str, content: str) -> W Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. """ + body = {} if content is not None: body["content"] = content @@ -2116,6 +2117,7 @@ def send_message_feedback(self, space_id: str, conversation_id: str, message_id: """ + body = {} if rating is not None: body["rating"] = rating.value @@ -2143,6 +2145,7 @@ def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. 
""" + body = {} if content is not None: body["content"] = content @@ -2195,6 +2198,7 @@ def create(self, dashboard: Dashboard) -> Dashboard: :returns: :class:`Dashboard` """ + body = dashboard.as_dict() headers = { "Accept": "application/json", @@ -2214,6 +2218,7 @@ def create_schedule(self, dashboard_id: str, schedule: Schedule) -> Schedule: :returns: :class:`Schedule` """ + body = schedule.as_dict() headers = { "Accept": "application/json", @@ -2235,6 +2240,7 @@ def create_subscription(self, dashboard_id: str, schedule_id: str, subscription: :returns: :class:`Subscription` """ + body = subscription.as_dict() headers = { "Accept": "application/json", @@ -2530,6 +2536,7 @@ def migrate( :returns: :class:`Dashboard` """ + body = {} if display_name is not None: body["display_name"] = display_name @@ -2562,6 +2569,7 @@ def publish( :returns: :class:`PublishedDashboard` """ + body = {} if embed_credentials is not None: body["embed_credentials"] = embed_credentials @@ -2614,6 +2622,7 @@ def update(self, dashboard_id: str, dashboard: Dashboard) -> Dashboard: :returns: :class:`Dashboard` """ + body = dashboard.as_dict() headers = { "Accept": "application/json", @@ -2635,6 +2644,7 @@ def update_schedule(self, dashboard_id: str, schedule_id: str, schedule: Schedul :returns: :class:`Schedule` """ + body = schedule.as_dict() headers = { "Accept": "application/json", diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index aed67de1d..b0bbbd7cb 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -5,6 +5,7 @@ import logging import random import time +import uuid from dataclasses import dataclass from datetime import timedelta from enum import Enum @@ -1570,6 +1571,7 @@ def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog: :returns: :class:`DatabaseCatalog` """ + body = catalog.as_dict() headers = { "Accept": "application/json", @@ -1589,6 +1591,7 @@ def 
create_database_instance(self, database_instance: DatabaseInstance) -> Wait[ Long-running operation waiter for :class:`DatabaseInstance`. See :method:wait_get_database_instance_database_available for more details. """ + body = database_instance.as_dict() headers = { "Accept": "application/json", @@ -1622,6 +1625,7 @@ def create_database_instance_role( :returns: :class:`DatabaseInstanceRole` """ + body = database_instance_role.as_dict() query = {} if database_instance_name is not None: @@ -1644,6 +1648,7 @@ def create_database_table(self, table: DatabaseTable) -> DatabaseTable: :returns: :class:`DatabaseTable` """ + body = table.as_dict() headers = { "Accept": "application/json", @@ -1660,6 +1665,7 @@ def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> Syn :returns: :class:`SyncedDatabaseTable` """ + body = synced_table.as_dict() headers = { "Accept": "application/json", @@ -1806,6 +1812,9 @@ def generate_database_credential( :returns: :class:`DatabaseCredential` """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) body = {} if claims is not None: body["claims"] = [v.as_dict() for v in claims] @@ -2049,6 +2058,7 @@ def update_database_catalog( :returns: :class:`DatabaseCatalog` """ + body = database_catalog.as_dict() query = {} if update_mask is not None: @@ -2075,6 +2085,7 @@ def update_database_instance( :returns: :class:`DatabaseInstance` """ + body = database_instance.as_dict() query = {} if update_mask is not None: @@ -2101,6 +2112,7 @@ def update_synced_database_table( :returns: :class:`SyncedDatabaseTable` """ + body = synced_table.as_dict() query = {} if update_mask is not None: diff --git a/databricks/sdk/service/dataquality.py b/databricks/sdk/service/dataquality.py index fa99857fb..d9a64e5df 100755 --- a/databricks/sdk/service/dataquality.py +++ b/databricks/sdk/service/dataquality.py @@ -909,6 +909,7 @@ def create_monitor(self, monitor: Monitor) -> Monitor: :returns: :class:`Monitor` """ + body = 
monitor.as_dict() headers = { "Accept": "application/json", @@ -946,6 +947,7 @@ def create_refresh(self, object_type: str, object_id: str, refresh: Refresh) -> :returns: :class:`Refresh` """ + body = refresh.as_dict() headers = { "Accept": "application/json", @@ -1229,6 +1231,7 @@ def update_monitor(self, object_type: str, object_id: str, monitor: Monitor, upd :returns: :class:`Monitor` """ + body = monitor.as_dict() query = {} if update_mask is not None: @@ -1271,6 +1274,7 @@ def update_refresh( :returns: :class:`Refresh` """ + body = refresh.as_dict() query = {} if update_mask is not None: diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index e26de85ab..a6de83099 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -51,24 +51,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: return cls() -@dataclass -class CreateDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateDirectoryResponse: - """Deserializes the CreateDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class CreateResponse: handle: Optional[int] = None @@ -95,24 +77,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: return cls(handle=d.get("handle", None)) -@dataclass -class DeleteDirectoryResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def 
from_dict(cls, d: Dict[str, Any]) -> DeleteDirectoryResponse: - """Deserializes the DeleteDirectoryResponse from a dictionary.""" - return cls() - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -290,24 +254,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FileInfo: ) -@dataclass -class GetDirectoryMetadataResponse: - def as_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetDirectoryMetadataResponse: - """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" - return cls() - - @dataclass class GetMetadataResponse: content_length: Optional[int] = None @@ -496,24 +442,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ReadResponse: return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None)) -@dataclass -class UploadResponse: - def as_dict(self) -> dict: - """Serializes the UploadResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UploadResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UploadResponse: - """Deserializes the UploadResponse from a dictionary.""" - return cls() - - class DbfsAPI: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" @@ -534,6 +462,7 @@ def add_block(self, handle: int, data: str): """ + body = {} if data is not None: body["data"] = data @@ -555,6 +484,7 @@ def close(self, handle: int): """ + body = {} if handle is not None: body["handle"] = handle @@ -582,6 +512,7 @@ def 
create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateRespon :returns: :class:`CreateResponse` """ + body = {} if overwrite is not None: body["overwrite"] = overwrite @@ -619,6 +550,7 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): """ + body = {} if path is not None: body["path"] = path @@ -690,6 +622,7 @@ def mkdirs(self, path: str): """ + body = {} if path is not None: body["path"] = path @@ -713,6 +646,7 @@ def move(self, source_path: str, destination_path: str): """ + body = {} if destination_path is not None: body["destination_path"] = destination_path @@ -746,6 +680,7 @@ def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[ """ + body = {} if contents is not None: body["contents"] = contents diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 0a1f53ca2..e84121f29 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -501,24 +501,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ConsistencyToken: return cls(value=d.get("value", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteWorkspacePermissionAssignmentResponse: def as_dict(self) -> dict: @@ -1465,24 +1447,6 @@ class PatchOp(Enum): REPLACE = "replace" -@dataclass -class PatchResponse: - def as_dict(self) -> dict: - """Serializes the PatchResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchResponse into a shallow 
dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchResponse: - """Deserializes the PatchResponse from a dictionary.""" - return cls() - - class PatchSchema(Enum): URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp" @@ -2338,6 +2302,7 @@ def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetR :returns: :class:`RuleSetResponse` """ + body = {} if name is not None: body["name"] = name @@ -2444,6 +2409,7 @@ def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetR :returns: :class:`RuleSetResponse` """ + body = {} if name is not None: body["name"] = name @@ -2494,6 +2460,7 @@ def create( :returns: :class:`AccountGroup` """ + body = {} if display_name is not None: body["displayName"] = display_name @@ -2628,6 +2595,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -2666,6 +2634,7 @@ def update( """ + body = {} if display_name is not None: body["displayName"] = display_name @@ -2721,6 +2690,7 @@ def create( :returns: :class:`AccountServicePrincipal` """ + body = {} if active is not None: body["active"] = active @@ -2860,6 +2830,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -2905,6 +2876,7 @@ def update( """ + body = {} if active is not None: body["active"] = active @@ -2976,6 +2948,7 @@ def create( :returns: :class:`AccountUser` """ + body = {} if active is not None: body["active"] = active @@ -3162,6 +3135,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -3208,6 +3182,7 @@ def update( """ 
+ body = {} if active is not None: body["active"] = active @@ -3299,6 +3274,7 @@ def create( :returns: :class:`Group` """ + body = {} if display_name is not None: body["displayName"] = display_name @@ -3433,6 +3409,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -3481,6 +3458,7 @@ def update( """ + body = {} if display_name is not None: body["displayName"] = display_name @@ -3533,6 +3511,7 @@ def migrate_permissions( :returns: :class:`MigratePermissionsResponse` """ + body = {} if from_workspace_group_name is not None: body["from_workspace_group_name"] = from_workspace_group_name @@ -3644,6 +3623,7 @@ def set( :returns: :class:`ObjectPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -3678,6 +3658,7 @@ def update( :returns: :class:`ObjectPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -3739,6 +3720,7 @@ def create( :returns: :class:`ServicePrincipal` """ + body = {} if active is not None: body["active"] = active @@ -3873,6 +3855,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -3924,6 +3907,7 @@ def update( """ + body = {} if active is not None: body["active"] = active @@ -4010,6 +3994,7 @@ def create( :returns: :class:`User` """ + body = {} if active is not None: body["active"] = active @@ -4224,6 +4209,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O """ + body = {} if operations is not None: body["Operations"] = [v.as_dict() for v in operations] @@ -4246,6 +4232,7 @@ def set_permissions( :returns: :class:`PasswordPermissions` """ + body = {} if access_control_list is not 
None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -4303,6 +4290,7 @@ def update( """ + body = {} if active is not None: body["active"] = active @@ -4340,6 +4328,7 @@ def update_permissions( :returns: :class:`PasswordPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -4441,6 +4430,7 @@ def update( :returns: :class:`PermissionAssignment` """ + body = {} if permissions is not None: body["permissions"] = [v.value for v in permissions] diff --git a/databricks/sdk/service/iamv2.py b/databricks/sdk/service/iamv2.py index b95605da5..0422f37f8 100755 --- a/databricks/sdk/service/iamv2.py +++ b/databricks/sdk/service/iamv2.py @@ -472,6 +472,7 @@ def resolve_group(self, external_id: str) -> ResolveGroupResponse: :returns: :class:`ResolveGroupResponse` """ + body = {} if external_id is not None: body["external_id"] = external_id @@ -498,6 +499,7 @@ def resolve_service_principal(self, external_id: str) -> ResolveServicePrincipal :returns: :class:`ResolveServicePrincipalResponse` """ + body = {} if external_id is not None: body["external_id"] = external_id @@ -524,6 +526,7 @@ def resolve_user(self, external_id: str) -> ResolveUserResponse: :returns: :class:`ResolveUserResponse` """ + body = {} if external_id is not None: body["external_id"] = external_id @@ -587,6 +590,7 @@ def resolve_group_proxy(self, external_id: str) -> ResolveGroupResponse: :returns: :class:`ResolveGroupResponse` """ + body = {} if external_id is not None: body["external_id"] = external_id @@ -608,6 +612,7 @@ def resolve_service_principal_proxy(self, external_id: str) -> ResolveServicePri :returns: :class:`ResolveServicePrincipalResponse` """ + body = {} if external_id is not None: body["external_id"] = external_id @@ -631,6 +636,7 @@ def resolve_user_proxy(self, external_id: str) -> ResolveUserResponse: :returns: :class:`ResolveUserResponse` """ + body = {} if external_id is not None: 
body["external_id"] = external_id diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 9f8766784..28b9e5c64 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -449,42 +449,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: ) -@dataclass -class CancelAllRunsResponse: - def as_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: - """Deserializes the CancelAllRunsResponse from a dictionary.""" - return cls() - - -@dataclass -class CancelRunResponse: - def as_dict(self) -> dict: - """Serializes the CancelRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse: - """Deserializes the CancelRunResponse from a dictionary.""" - return cls() - - class CleanRoomTaskRunLifeCycleState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. 
Using the original definition to remove coupling with jobs API definition""" @@ -1515,42 +1479,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtTask: ) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteRunResponse: - def as_dict(self) -> dict: - """Serializes the DeleteRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: - """Deserializes the DeleteRunResponse from a dictionary.""" - return cls() - - @dataclass class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Represents a change to the job cluster's settings that would be required for the job clusters to @@ -4212,24 +4140,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: return cls(repair_id=d.get("repair_id", None)) -@dataclass -class ResetResponse: - def as_dict(self) -> dict: - """Serializes the ResetResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResetResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResetResponse: - """Deserializes the ResetResponse from a dictionary.""" - return cls() - - @dataclass class 
ResolvedConditionTaskValues: left: Optional[str] = None @@ -8056,9 +7966,6 @@ class TriggerSettings: periodic: Optional[PeriodicTriggerConfiguration] = None """Periodic trigger settings.""" - table: Optional[TableUpdateTriggerConfiguration] = None - """Old table trigger settings name. Deprecated in favor of `table_update`.""" - table_update: Optional[TableUpdateTriggerConfiguration] = None def as_dict(self) -> dict: @@ -8070,8 +7977,6 @@ def as_dict(self) -> dict: body["pause_status"] = self.pause_status.value if self.periodic: body["periodic"] = self.periodic.as_dict() - if self.table: - body["table"] = self.table.as_dict() if self.table_update: body["table_update"] = self.table_update.as_dict() return body @@ -8085,8 +7990,6 @@ def as_shallow_dict(self) -> dict: body["pause_status"] = self.pause_status if self.periodic: body["periodic"] = self.periodic - if self.table: - body["table"] = self.table if self.table_update: body["table_update"] = self.table_update return body @@ -8098,7 +8001,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration), pause_status=_enum(d, "pause_status", PauseStatus), periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), - table=_from_dict(d, "table", TableUpdateTriggerConfiguration), table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration), ) @@ -8159,24 +8061,6 @@ class TriggerType(Enum): TABLE = "TABLE" -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - @dataclass class ViewItem: 
content: Optional[str] = None @@ -8418,6 +8302,7 @@ def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Opt """ + body = {} if all_queued_runs is not None: body["all_queued_runs"] = all_queued_runs @@ -8440,6 +8325,7 @@ def cancel_run(self, run_id: int) -> Wait[Run]: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ + body = {} if run_id is not None: body["run_id"] = run_id @@ -8584,6 +8470,7 @@ def create( :returns: :class:`CreateResponse` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8653,6 +8540,7 @@ def delete(self, job_id: int): """ + body = {} if job_id is not None: body["job_id"] = job_id @@ -8670,6 +8558,7 @@ def delete_run(self, run_id: int): """ + body = {} if run_id is not None: body["run_id"] = run_id @@ -9096,6 +8985,7 @@ def repair_run( Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ + body = {} if dbt_commands is not None: body["dbt_commands"] = [v for v in dbt_commands] @@ -9191,6 +9081,7 @@ def reset(self, job_id: int, new_settings: JobSettings): """ + body = {} if job_id is not None: body["job_id"] = job_id @@ -9331,6 +9222,7 @@ def run_now( Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ + body = {} if dbt_commands is not None: body["dbt_commands"] = [v for v in dbt_commands] @@ -9420,6 +9312,7 @@ def set_permissions( :returns: :class:`JobPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9509,6 +9402,7 @@ def submit( Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. 
""" + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9615,6 +9509,7 @@ def update( """ + body = {} if fields_to_remove is not None: body["fields_to_remove"] = [v for v in fields_to_remove] @@ -9639,6 +9534,7 @@ def update_permissions( :returns: :class:`JobPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9680,6 +9576,7 @@ def enforce_compliance( :returns: :class:`EnforcePolicyComplianceResponse` """ + body = {} if job_id is not None: body["job_id"] = job_id diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 8fc57b942..5e5ccc267 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +import uuid from dataclasses import dataclass from enum import Enum from typing import Any, Dict, Iterator, List, Optional @@ -3032,6 +3033,7 @@ def create( :returns: :class:`Installation` """ + body = {} if accepted_consumer_terms is not None: body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict() @@ -3156,6 +3158,7 @@ def update( :returns: :class:`UpdateInstallationResponse` """ + body = {} if installation is not None: body["installation"] = installation.as_dict() @@ -3377,6 +3380,7 @@ def create( :returns: :class:`CreatePersonalizationRequestResponse` """ + body = {} if accepted_consumer_terms is not None: body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict() @@ -3542,6 +3546,7 @@ def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse: :returns: :class:`CreateExchangeFilterResponse` """ + body = {} if filter is not None: body["filter"] = filter.as_dict() @@ -3607,6 +3612,7 @@ def update(self, id: str, filter: ExchangeFilter) -> UpdateExchangeFilterRespons :returns: :class:`UpdateExchangeFilterResponse` """ + body = {} if 
filter is not None: body["filter"] = filter.as_dict() @@ -3633,6 +3639,7 @@ def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExcha :returns: :class:`AddExchangeForListingResponse` """ + body = {} if exchange_id is not None: body["exchange_id"] = exchange_id @@ -3653,6 +3660,7 @@ def create(self, exchange: Exchange) -> CreateExchangeResponse: :returns: :class:`CreateExchangeResponse` """ + body = {} if exchange is not None: body["exchange"] = exchange.as_dict() @@ -3810,6 +3818,7 @@ def update(self, id: str, exchange: Exchange) -> UpdateExchangeResponse: :returns: :class:`UpdateExchangeResponse` """ + body = {} if exchange is not None: body["exchange"] = exchange.as_dict() @@ -3845,6 +3854,7 @@ def create( :returns: :class:`CreateFileResponse` """ + body = {} if display_name is not None: body["display_name"] = display_name @@ -3938,6 +3948,7 @@ def create(self, listing: Listing) -> CreateListingResponse: :returns: :class:`CreateListingResponse` """ + body = {} if listing is not None: body["listing"] = listing.as_dict() @@ -4013,6 +4024,7 @@ def update(self, id: str, listing: Listing) -> UpdateListingResponse: :returns: :class:`UpdateListingResponse` """ + body = {} if listing is not None: body["listing"] = listing.as_dict() @@ -4083,6 +4095,9 @@ def update( :returns: :class:`UpdatePersonalizationRequestResponse` """ + + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) body = {} if reason is not None: body["reason"] = reason @@ -4164,6 +4179,7 @@ def update(self, id: str, *, version: Optional[int] = None) -> UpdateProviderAna :returns: :class:`UpdateProviderAnalyticsDashboardResponse` """ + body = {} if version is not None: body["version"] = version @@ -4189,6 +4205,7 @@ def create(self, provider: ProviderInfo) -> CreateProviderResponse: :returns: :class:`CreateProviderResponse` """ + body = {} if provider is not None: body["provider"] = provider.as_dict() @@ -4264,6 +4281,7 @@ def update(self, id: str, provider: 
ProviderInfo) -> UpdateProviderResponse: :returns: :class:`UpdateProviderResponse` """ + body = {} if provider is not None: body["provider"] = provider.as_dict() diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index b42f008f9..b95021e3d 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -291,6 +291,38 @@ def from_dict(cls, d: Dict[str, Any]) -> CommentObject: ) +@dataclass +class ContinuousWindow: + window_duration: str + """The duration of the continuous window (must be positive).""" + + offset: Optional[str] = None + """The offset of the continuous window (must be non-positive).""" + + def as_dict(self) -> dict: + """Serializes the ContinuousWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.offset is not None: + body["offset"] = self.offset + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ContinuousWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.offset is not None: + body["offset"] = self.offset + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ContinuousWindow: + """Deserializes the ContinuousWindow from a dictionary.""" + return cls(offset=d.get("offset", None), window_duration=d.get("window_duration", None)) + + @dataclass class CreateCommentResponse: comment: Optional[CommentObject] = None @@ -2987,10 +3019,6 @@ class MaterializedFeature: feature_name: str """The full name of the feature in Unity Catalog.""" - offline_store_config: OfflineStoreConfig - - online_store_config: OnlineStore - last_materialization_time: Optional[str] = None """The timestamp when the pipeline last ran and updated the materialized feature values. 
If the pipeline has not run yet, this field will be null.""" @@ -2998,6 +3026,10 @@ class MaterializedFeature: materialized_feature_id: Optional[str] = None """Unique identifier for the materialized feature.""" + offline_store_config: Optional[OfflineStoreConfig] = None + + online_store_config: Optional[OnlineStore] = None + pipeline_schedule_state: Optional[MaterializedFeaturePipelineScheduleState] = None """The schedule state of the materialization pipeline.""" @@ -5114,6 +5146,38 @@ def from_dict(cls, d: Dict[str, Any]) -> SetTagResponse: return cls() +@dataclass +class SlidingWindow: + window_duration: str + """The duration of the sliding window.""" + + slide_duration: str + """The slide duration (interval by which windows advance, must be positive and less than duration).""" + + def as_dict(self) -> dict: + """Serializes the SlidingWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.slide_duration is not None: + body["slide_duration"] = self.slide_duration + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SlidingWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.slide_duration is not None: + body["slide_duration"] = self.slide_duration + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SlidingWindow: + """Deserializes the SlidingWindow from a dictionary.""" + return cls(slide_duration=d.get("slide_duration", None), window_duration=d.get("window_duration", None)) + + class Status(Enum): """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. 
@@ -5161,34 +5225,42 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: @dataclass class TimeWindow: - duration: str - """The duration of the time window.""" + continuous: Optional[ContinuousWindow] = None - offset: Optional[str] = None - """The offset of the time window.""" + sliding: Optional[SlidingWindow] = None + + tumbling: Optional[TumblingWindow] = None def as_dict(self) -> dict: """Serializes the TimeWindow into a dictionary suitable for use as a JSON request body.""" body = {} - if self.duration is not None: - body["duration"] = self.duration - if self.offset is not None: - body["offset"] = self.offset + if self.continuous: + body["continuous"] = self.continuous.as_dict() + if self.sliding: + body["sliding"] = self.sliding.as_dict() + if self.tumbling: + body["tumbling"] = self.tumbling.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TimeWindow into a shallow dictionary of its immediate attributes.""" body = {} - if self.duration is not None: - body["duration"] = self.duration - if self.offset is not None: - body["offset"] = self.offset + if self.continuous: + body["continuous"] = self.continuous + if self.sliding: + body["sliding"] = self.sliding + if self.tumbling: + body["tumbling"] = self.tumbling return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TimeWindow: """Deserializes the TimeWindow from a dictionary.""" - return cls(duration=d.get("duration", None), offset=d.get("offset", None)) + return cls( + continuous=_from_dict(d, "continuous", ContinuousWindow), + sliding=_from_dict(d, "sliding", SlidingWindow), + tumbling=_from_dict(d, "tumbling", TumblingWindow), + ) @dataclass @@ -5286,6 +5358,31 @@ def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: return cls(model_version_databricks=_from_dict(d, "model_version_databricks", ModelVersionDatabricks)) +@dataclass +class TumblingWindow: + window_duration: str + """The duration of each tumbling window (non-overlapping, 
fixed-duration windows).""" + + def as_dict(self) -> dict: + """Serializes the TumblingWindow into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TumblingWindow into a shallow dictionary of its immediate attributes.""" + body = {} + if self.window_duration is not None: + body["window_duration"] = self.window_duration + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TumblingWindow: + """Deserializes the TumblingWindow from a dictionary.""" + return cls(window_duration=d.get("window_duration", None)) + + @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None @@ -5478,6 +5575,7 @@ def create_experiment( :returns: :class:`CreateExperimentResponse` """ + body = {} if artifact_location is not None: body["artifact_location"] = artifact_location @@ -5520,6 +5618,7 @@ def create_logged_model( :returns: :class:`CreateLoggedModelResponse` """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -5568,6 +5667,7 @@ def create_run( :returns: :class:`CreateRunResponse` """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -5596,6 +5696,7 @@ def delete_experiment(self, experiment_id: str): """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -5646,6 +5747,7 @@ def delete_run(self, run_id: str): """ + body = {} if run_id is not None: body["run_id"] = run_id @@ -5674,6 +5776,7 @@ def delete_runs( :returns: :class:`DeleteRunsResponse` """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -5700,6 +5803,7 @@ def delete_tag(self, run_id: str, key: str): """ + body = {} if key is not None: body["key"] = key @@ -5723,6 +5827,7 @@ def finalize_logged_model(self, model_id: str, status: LoggedModelStatus) -> Fin :returns: 
:class:`FinalizeLoggedModelResponse` """ + body = {} if status is not None: body["status"] = status.value @@ -6061,6 +6166,7 @@ def log_batch( """ + body = {} if metrics is not None: body["metrics"] = [v.as_dict() for v in metrics] @@ -6091,6 +6197,7 @@ def log_inputs( """ + body = {} if datasets is not None: body["datasets"] = [v.as_dict() for v in datasets] @@ -6117,6 +6224,7 @@ def log_logged_model_params(self, model_id: str, *, params: Optional[List[Logged """ + body = {} if params is not None: body["params"] = [v.as_dict() for v in params] @@ -6168,6 +6276,7 @@ def log_metric( """ + body = {} if dataset_digest is not None: body["dataset_digest"] = dataset_digest @@ -6207,6 +6316,7 @@ def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = """ + body = {} if model_json is not None: body["model_json"] = model_json @@ -6229,6 +6339,7 @@ def log_outputs(self, run_id: str, *, models: Optional[List[ModelOutput]] = None """ + body = {} if models is not None: body["models"] = [v.as_dict() for v in models] @@ -6258,6 +6369,7 @@ def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_u """ + body = {} if key is not None: body["key"] = key @@ -6286,6 +6398,7 @@ def restore_experiment(self, experiment_id: str): """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -6306,6 +6419,7 @@ def restore_run(self, run_id: str): """ + body = {} if run_id is not None: body["run_id"] = run_id @@ -6334,6 +6448,7 @@ def restore_runs( :returns: :class:`RestoreRunsResponse` """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -6375,6 +6490,7 @@ def search_experiments( :returns: Iterator over :class:`Experiment` """ + body = {} if filter is not None: body["filter"] = filter @@ -6434,6 +6550,7 @@ def search_logged_models( :returns: :class:`SearchLoggedModelsResponse` """ + body = {} if datasets is not None: body["datasets"] = [v.as_dict() for v in datasets] @@ -6497,6 +6614,7 
@@ def search_runs( :returns: Iterator over :class:`Run` """ + body = {} if experiment_ids is not None: body["experiment_ids"] = [v for v in experiment_ids] @@ -6536,6 +6654,7 @@ def set_experiment_tag(self, experiment_id: str, key: str, value: str): """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -6560,6 +6679,7 @@ def set_logged_model_tags(self, model_id: str, *, tags: Optional[List[LoggedMode """ + body = {} if tags is not None: body["tags"] = [v.as_dict() for v in tags] @@ -6582,6 +6702,7 @@ def set_permissions( :returns: :class:`ExperimentPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -6608,6 +6729,7 @@ def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uui """ + body = {} if key is not None: body["key"] = key @@ -6634,6 +6756,7 @@ def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = Non """ + body = {} if experiment_id is not None: body["experiment_id"] = experiment_id @@ -6657,6 +6780,7 @@ def update_permissions( :returns: :class:`ExperimentPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -6693,6 +6817,7 @@ def update_run( :returns: :class:`UpdateRunResponse` """ + body = {} if end_time is not None: body["end_time"] = end_time @@ -6727,6 +6852,7 @@ def create_feature(self, feature: Feature) -> Feature: :returns: :class:`Feature` """ + body = feature.as_dict() headers = { "Accept": "application/json", @@ -6744,6 +6870,7 @@ def create_materialized_feature(self, materialized_feature: MaterializedFeature) :returns: :class:`MaterializedFeature` """ + body = materialized_feature.as_dict() headers = { "Accept": "application/json", @@ -6899,6 +7026,7 @@ def update_feature(self, full_name: str, feature: Feature, update_mask: str) -> :returns: :class:`Feature` """ + body = feature.as_dict() query = 
{} if update_mask is not None: @@ -6928,6 +7056,7 @@ def update_materialized_feature( :returns: :class:`MaterializedFeature` """ + body = materialized_feature.as_dict() query = {} if update_mask is not None: @@ -6966,6 +7095,7 @@ def create_online_store(self, online_store: OnlineStore) -> OnlineStore: :returns: :class:`OnlineStore` """ + body = online_store.as_dict() headers = { "Accept": "application/json", @@ -7047,6 +7177,7 @@ def publish_table(self, source_table_name: str, publish_spec: PublishSpec) -> Pu :returns: :class:`PublishTableResponse` """ + body = {} if publish_spec is not None: body["publish_spec"] = publish_spec.as_dict() @@ -7072,6 +7203,7 @@ def update_online_store(self, name: str, online_store: OnlineStore, update_mask: :returns: :class:`OnlineStore` """ + body = online_store.as_dict() query = {} if update_mask is not None: @@ -7204,6 +7336,7 @@ def create_experiment( Long-running operation waiter for :class:`ForecastingExperiment`. See :method:wait_get_experiment_forecasting_succeeded for more details. 
""" + body = {} if custom_weights_column is not None: body["custom_weights_column"] = custom_weights_column @@ -7326,6 +7459,7 @@ def create_feature_tag(self, table_name: str, feature_name: str, feature_tag: Fe :returns: :class:`FeatureTag` """ + body = feature_tag.as_dict() headers = { "Accept": "application/json", @@ -7464,6 +7598,7 @@ def update_feature_tag( :returns: :class:`FeatureTag` """ + body = feature_tag.as_dict() query = {} if update_mask is not None: @@ -7521,6 +7656,7 @@ def approve_transition_request( :returns: :class:`ApproveTransitionRequestResponse` """ + body = {} if archive_existing_versions is not None: body["archive_existing_versions"] = archive_existing_versions @@ -7553,6 +7689,7 @@ def create_comment(self, name: str, version: str, comment: str) -> CreateComment :returns: :class:`CreateCommentResponse` """ + body = {} if comment is not None: body["comment"] = comment @@ -7583,6 +7720,7 @@ def create_model( :returns: :class:`CreateModelResponse` """ + body = {} if description is not None: body["description"] = description @@ -7627,6 +7765,7 @@ def create_model_version( :returns: :class:`CreateModelVersionResponse` """ + body = {} if description is not None: body["description"] = description @@ -7672,6 +7811,7 @@ def create_transition_request( :returns: :class:`CreateTransitionRequestResponse` """ + body = {} if comment is not None: body["comment"] = comment @@ -7750,6 +7890,7 @@ def create_webhook( :returns: :class:`CreateWebhookResponse` """ + body = {} if description is not None: body["description"] = description @@ -7953,6 +8094,7 @@ def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) :returns: Iterator over :class:`ModelVersion` """ + body = {} if name is not None: body["name"] = name @@ -8222,6 +8364,7 @@ def reject_transition_request( :returns: :class:`RejectTransitionRequestResponse` """ + body = {} if comment is not None: body["comment"] = comment @@ -8249,6 +8392,7 @@ def rename_model(self, name: str, *, 
new_name: Optional[str] = None) -> RenameMo :returns: :class:`RenameModelResponse` """ + body = {} if name is not None: body["name"] = name @@ -8372,6 +8516,7 @@ def set_model_tag(self, name: str, key: str, value: str): """ + body = {} if key is not None: body["key"] = key @@ -8403,6 +8548,7 @@ def set_model_version_tag(self, name: str, version: str, key: str, value: str): """ + body = {} if key is not None: body["key"] = key @@ -8434,6 +8580,7 @@ def set_permissions( :returns: :class:`RegisteredModelPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8460,6 +8607,7 @@ def test_registry_webhook( :returns: :class:`TestRegistryWebhookResponse` """ + body = {} if event is not None: body["event"] = event.value @@ -8502,6 +8650,7 @@ def transition_stage( :returns: :class:`TransitionStageResponse` """ + body = {} if archive_existing_versions is not None: body["archive_existing_versions"] = archive_existing_versions @@ -8533,6 +8682,7 @@ def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: :returns: :class:`UpdateCommentResponse` """ + body = {} if comment is not None: body["comment"] = comment @@ -8556,6 +8706,7 @@ def update_model(self, name: str, *, description: Optional[str] = None) -> Updat :returns: :class:`UpdateModelResponse` """ + body = {} if description is not None: body["description"] = description @@ -8583,6 +8734,7 @@ def update_model_version( :returns: :class:`UpdateModelVersionResponse` """ + body = {} if description is not None: body["description"] = description @@ -8613,6 +8765,7 @@ def update_permissions( :returns: :class:`RegisteredModelPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8677,6 +8830,7 @@ def update_webhook( :returns: :class:`UpdateWebhookResponse` """ + body = {} if description is not None: body["description"] = description diff --git 
a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 28bae5347..e3af3dc71 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -194,24 +194,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePublishedAppIntegrationOutput: return cls() -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class FederationPolicy: create_time: Optional[str] = None @@ -1013,6 +995,7 @@ def create(self, policy: FederationPolicy, *, policy_id: Optional[str] = None) - :returns: :class:`FederationPolicy` """ + body = policy.as_dict() query = {} if policy_id is not None: @@ -1112,6 +1095,7 @@ def update( :returns: :class:`FederationPolicy` """ + body = policy.as_dict() query = {} if update_mask is not None: @@ -1169,6 +1153,7 @@ def create( :returns: :class:`CreateCustomAppIntegrationOutput` """ + body = {} if confidential is not None: body["confidential"] = confidential @@ -1301,6 +1286,7 @@ def update( """ + body = {} if redirect_urls is not None: body["redirect_urls"] = [v for v in redirect_urls] @@ -1386,6 +1372,7 @@ def create( :returns: :class:`CreatePublishedAppIntegrationOutput` """ + body = {} if app_id is not None: body["app_id"] = app_id @@ -1486,6 +1473,7 @@ def update(self, integration_id: str, *, token_access_policy: Optional[TokenAcce """ + body = {} if token_access_policy is not None: body["token_access_policy"] = token_access_policy.as_dict() @@ -1561,6 +1549,7 @@ def create( :returns: :class:`FederationPolicy` """ + body = policy.as_dict() query = {} if policy_id is 
not None: @@ -1677,6 +1666,7 @@ def update( :returns: :class:`FederationPolicy` """ + body = policy.as_dict() query = {} if update_mask is not None: @@ -1726,6 +1716,7 @@ def create( :returns: :class:`CreateServicePrincipalSecretResponse` """ + body = {} if lifetime is not None: body["lifetime"] = lifetime @@ -1836,6 +1827,7 @@ def create( :returns: :class:`CreateServicePrincipalSecretResponse` """ + body = {} if lifetime is not None: body["lifetime"] = lifetime diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 084d0126d..cea28bc53 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -2024,6 +2024,9 @@ class PipelineSpec: trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" + usage_policy_id: Optional[str] = None + """Usage policy of this pipeline.""" + def as_dict(self) -> dict: """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2081,6 +2084,8 @@ def as_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body def as_shallow_dict(self) -> dict: @@ -2140,6 +2145,8 @@ def as_shallow_dict(self) -> dict: body["target"] = self.target if self.trigger: body["trigger"] = self.trigger + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id return body @classmethod @@ -2173,6 +2180,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec: tags=d.get("tags", None), target=d.get("target", None), trigger=_from_dict(d, "trigger", PipelineTrigger), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -3318,6 +3326,7 @@ def create( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ) -> 
CreatePipelineResponse: """Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. @@ -3388,9 +3397,12 @@ def create( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. :returns: :class:`CreatePipelineResponse` """ + body = {} if allow_duplicate_names is not None: body["allow_duplicate_names"] = allow_duplicate_names @@ -3452,6 +3464,8 @@ def create( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -3699,6 +3713,7 @@ def set_permissions( :returns: :class:`PipelinePermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -3741,6 +3756,7 @@ def start_update( :returns: :class:`StartUpdateResponse` """ + body = {} if cause is not None: body["cause"] = cause.value @@ -3815,6 +3831,7 @@ def update( tags: Optional[Dict[str, str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, + usage_policy_id: Optional[str] = None, ): """Updates a pipeline with the supplied configuration. @@ -3888,9 +3905,12 @@ def update( for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. 
""" + body = {} if allow_duplicate_names is not None: body["allow_duplicate_names"] = allow_duplicate_names @@ -3952,6 +3972,8 @@ def update( body["target"] = target if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -3970,6 +3992,7 @@ def update_permissions( :returns: :class:`PipelinePermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 2f173112e..a2e651647 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1765,6 +1765,7 @@ def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCred :returns: :class:`Credential` """ + body = {} if aws_credentials is not None: body["aws_credentials"] = aws_credentials.as_dict() @@ -1876,6 +1877,7 @@ def create( :returns: :class:`CustomerManagedKey` """ + body = {} if aws_key_info is not None: body["aws_key_info"] = aws_key_info.as_dict() @@ -1996,6 +1998,7 @@ def create( :returns: :class:`Network` """ + body = {} if gcp_network_info is not None: body["gcp_network_info"] = gcp_network_info.as_dict() @@ -2111,6 +2114,7 @@ def create( :returns: :class:`PrivateAccessSettings` """ + body = {} if allowed_vpc_endpoint_ids is not None: body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids] @@ -2205,6 +2209,7 @@ def replace( :returns: :class:`PrivateAccessSettings` """ + body = customer_facing_private_access_settings.as_dict() headers = { "Accept": "application/json", @@ -2247,6 +2252,7 @@ def create( :returns: :class:`StorageConfiguration` """ + body = {} if role_arn is not None: body["role_arn"] = role_arn @@ -2356,6 +2362,7 @@ def create( :returns: :class:`VpcEndpoint` """ + body = {} if 
aws_vpc_endpoint_id is not None: body["aws_vpc_endpoint_id"] = aws_vpc_endpoint_id @@ -2595,6 +2602,7 @@ def create( Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. """ + body = {} if aws_region is not None: body["aws_region"] = aws_region @@ -2768,6 +2776,7 @@ def update( Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. """ + body = customer_facing_workspace.as_dict() query = {} if update_mask is not None: diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index 337bb86a5..322989fc9 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -151,6 +151,7 @@ def create_quality_monitor(self, quality_monitor: QualityMonitor) -> QualityMoni :returns: :class:`QualityMonitor` """ + body = quality_monitor.as_dict() headers = { "Accept": "application/json", @@ -237,6 +238,7 @@ def update_quality_monitor( :returns: :class:`QualityMonitor` """ + body = quality_monitor.as_dict() headers = { "Accept": "application/json", diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index d219f7495..e36972570 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -1036,24 +1036,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DataframeSplitInput: return cls(columns=d.get("columns", None), data=d.get("data", None), index=d.get("index", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a 
dictionary.""" - return cls() - - @dataclass class EmailNotifications: on_update_failure: Optional[List[str]] = None @@ -1141,15 +1123,15 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum): @dataclass class EndpointCoreConfigInput: + name: str + """The name of the serving endpoint to update. This field is required.""" + auto_capture_config: Optional[AutoCaptureConfigInput] = None """Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - name: Optional[str] = None - """The name of the serving endpoint to update. This field is required.""" - served_entities: Optional[List[ServedEntityInput]] = None """The list of served entities under the serving endpoint config.""" @@ -4109,6 +4091,7 @@ def create( Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ + body = {} if ai_gateway is not None: body["ai_gateway"] = ai_gateway.as_dict() @@ -4196,6 +4179,7 @@ def create_provisioned_throughput_endpoint( Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. 
""" + body = {} if ai_gateway is not None: body["ai_gateway"] = ai_gateway.as_dict() @@ -4366,6 +4350,7 @@ def http_request( :returns: :class:`HttpRequestResponse` """ + body = {} if connection_name is not None: body["connection_name"] = connection_name @@ -4436,6 +4421,7 @@ def patch( :returns: :class:`EndpointTags` """ + body = {} if add_tags is not None: body["add_tags"] = [v.as_dict() for v in add_tags] @@ -4459,6 +4445,7 @@ def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> Pu :returns: :class:`PutResponse` """ + body = {} if rate_limits is not None: body["rate_limits"] = [v.as_dict() for v in rate_limits] @@ -4501,6 +4488,7 @@ def put_ai_gateway( :returns: :class:`PutAiGatewayResponse` """ + body = {} if fallback_config is not None: body["fallback_config"] = fallback_config.as_dict() @@ -4593,6 +4581,7 @@ def query( :returns: :class:`QueryEndpointResponse` """ + body = {} if client_request_id is not None: body["client_request_id"] = client_request_id @@ -4656,6 +4645,7 @@ def set_permissions( :returns: :class:`ServingEndpointPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -4701,6 +4691,7 @@ def update_config( Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. 
""" + body = {} if auto_capture_config is not None: body["auto_capture_config"] = auto_capture_config.as_dict() @@ -4753,6 +4744,7 @@ def update_notifications( :returns: :class:`UpdateInferenceEndpointNotificationsResponse` """ + body = {} if email_notifications is not None: body["email_notifications"] = email_notifications.as_dict() @@ -4779,6 +4771,7 @@ def update_permissions( :returns: :class:`ServingEndpointPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -4807,6 +4800,7 @@ def update_provisioned_throughput_endpoint_config( Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ + body = {} if config is not None: body["config"] = config.as_dict() @@ -4931,6 +4925,7 @@ def query( :returns: :class:`QueryEndpointResponse` """ + body = {} if client_request_id is not None: body["client_request_id"] = client_request_id diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 3bc7a2969..c6126a23e 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1697,24 +1697,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeletePersonalComputeSettingResponse: return cls(etag=d.get("etag", None)) -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeleteRestrictWorkspaceAdminsSettingResponse: """The etag is returned.""" @@ -4553,24 +4535,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
PublicTokenInfo: ) -@dataclass -class ReplaceResponse: - def as_dict(self) -> dict: - """Serializes the ReplaceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: - """Deserializes the ReplaceResponse from a dictionary.""" - return cls() - - @dataclass class RestrictWorkspaceAdminsMessage: status: RestrictWorkspaceAdminsMessageStatus @@ -4669,24 +4633,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenResponse: return cls() -@dataclass -class SetStatusResponse: - def as_dict(self) -> dict: - """Serializes the SetStatusResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: - """Deserializes the SetStatusResponse from a dictionary.""" - return cls() - - @dataclass class SlackConfig: channel_id: Optional[str] = None @@ -5215,24 +5161,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: ) -@dataclass -class UpdateResponse: - def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() - - WorkspaceConf = Dict[str, str] @@ -5317,6 +5245,7 @@ def create( :returns: :class:`CreateIpAccessListResponse` """ 
+ body = {} if ip_addresses is not None: body["ip_addresses"] = [v for v in ip_addresses] @@ -5412,6 +5341,7 @@ def replace( """ + body = {} if enabled is not None: body["enabled"] = enabled @@ -5466,6 +5396,7 @@ def update( """ + body = {} if enabled is not None: body["enabled"] = enabled @@ -5619,6 +5550,7 @@ def update( :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -5723,6 +5655,7 @@ def update( :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -5800,6 +5733,7 @@ def update( :returns: :class:`AutomaticClusterUpdateSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -5876,6 +5810,7 @@ def update( :returns: :class:`ComplianceSecurityProfileSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -5916,6 +5851,7 @@ def exchange_token( :returns: :class:`ExchangeTokenResponse` """ + body = {} if partition_id is not None: body["partitionId"] = partition_id.as_dict() @@ -5992,6 +5928,7 @@ def update( :returns: :class:`CspEnablementAccountSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6095,6 +6032,7 @@ def update( :returns: :class:`DashboardEmailSubscriptions` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6205,6 +6143,7 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_ma :returns: :class:`DefaultNamespaceSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6299,6 +6238,7 @@ def update(self, allow_missing: bool, setting: DefaultWarehouseId, field_mask: s :returns: :class:`DefaultWarehouseId` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6396,6 +6336,7 @@ def update(self, 
allow_missing: bool, setting: DisableLegacyAccess, field_mask: :returns: :class:`DisableLegacyAccess` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6496,6 +6437,7 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st :returns: :class:`DisableLegacyDbfs` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6599,6 +6541,7 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures, field_mask :returns: :class:`DisableLegacyFeatures` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6663,6 +6606,7 @@ def patch_enable_export_notebook( :returns: :class:`EnableExportNotebook` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6763,6 +6707,7 @@ def update(self, allow_missing: bool, setting: AccountIpAccessEnable, field_mask :returns: :class:`AccountIpAccessEnable` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6829,6 +6774,7 @@ def patch_enable_notebook_table_clipboard( :returns: :class:`EnableNotebookTableClipboard` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6889,6 +6835,7 @@ def patch_enable_results_downloading( :returns: :class:`EnableResultsDownloading` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -6967,6 +6914,7 @@ def update( :returns: :class:`EnhancedSecurityMonitoringSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -7042,6 +6990,7 @@ def update( :returns: :class:`EsmEnablementAccountSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -7110,6 +7059,7 @@ def create( :returns: :class:`CreateIpAccessListResponse` """ + body = {} if ip_addresses is not None: body["ip_addresses"] = [v for v in ip_addresses] @@ -7200,6 +7150,7 @@ def replace( """ + body = {} if 
enabled is not None: body["enabled"] = enabled @@ -7250,6 +7201,7 @@ def update( """ + body = {} if enabled is not None: body["enabled"] = enabled @@ -7321,6 +7273,7 @@ def update( :returns: :class:`LlmProxyPartnerPoweredAccount` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -7398,6 +7351,7 @@ def update( :returns: :class:`LlmProxyPartnerPoweredEnforce` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -7496,6 +7450,7 @@ def update( :returns: :class:`LlmProxyPartnerPoweredWorkspace` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -7546,6 +7501,7 @@ def create_network_connectivity_configuration( :returns: :class:`NetworkConnectivityConfiguration` """ + body = network_connectivity_config.as_dict() headers = { "Accept": "application/json", @@ -7576,6 +7532,7 @@ def create_private_endpoint_rule( :returns: :class:`NccPrivateEndpointRule` """ + body = private_endpoint_rule.as_dict() headers = { "Accept": "application/json", @@ -7773,6 +7730,7 @@ def update_private_endpoint_rule( :returns: :class:`NccPrivateEndpointRule` """ + body = private_endpoint_rule.as_dict() query = {} if update_mask is not None: @@ -7812,6 +7770,7 @@ def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> Acc :returns: :class:`AccountNetworkPolicy` """ + body = network_policy.as_dict() headers = { "Accept": "application/json", @@ -7897,6 +7856,7 @@ def update_network_policy_rpc( :returns: :class:`AccountNetworkPolicy` """ + body = network_policy.as_dict() headers = { "Accept": "application/json", @@ -7931,6 +7891,7 @@ def create(self, *, config: Optional[Config] = None, display_name: Optional[str] :returns: :class:`NotificationDestination` """ + body = {} if config is not None: body["config"] = config.as_dict() @@ -8017,6 +7978,7 @@ def update( :returns: :class:`NotificationDestination` """ + body = {} if config is not None: body["config"] = config.as_dict() @@ 
-8118,6 +8080,7 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting, field_mas :returns: :class:`PersonalComputeSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -8230,6 +8193,7 @@ def update( :returns: :class:`RestrictWorkspaceAdminsSetting` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -8428,6 +8392,7 @@ def update(self, allow_missing: bool, setting: SqlResultsDownload, field_mask: s :returns: :class:`SqlResultsDownload` """ + body = {} if allow_missing is not None: body["allow_missing"] = allow_missing @@ -8467,6 +8432,7 @@ def create_obo_token( :returns: :class:`CreateOboTokenResponse` """ + body = {} if application_id is not None: body["application_id"] = application_id @@ -8575,6 +8541,7 @@ def set_permissions( :returns: :class:`TokenPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8595,6 +8562,7 @@ def update_permissions( :returns: :class:`TokenPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8628,6 +8596,7 @@ def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[in :returns: :class:`CreateTokenResponse` """ + body = {} if comment is not None: body["comment"] = comment @@ -8651,6 +8620,7 @@ def delete(self, token_id: str): """ + body = {} if token_id is not None: body["token_id"] = token_id @@ -8753,6 +8723,7 @@ def update_workspace_network_option_rpc( :returns: :class:`WorkspaceNetworkOption` """ + body = workspace_network_option.as_dict() headers = { "Accept": "application/json", diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py index 322ffd47c..a529d7a5a 100755 --- a/databricks/sdk/service/settingsv2.py +++ b/databricks/sdk/service/settingsv2.py @@ -793,6 +793,7 @@ def patch_public_account_setting(self, 
name: str, setting: Setting) -> Setting: :returns: :class:`Setting` """ + body = setting.as_dict() headers = { "Accept": "application/json", @@ -875,6 +876,7 @@ def patch_public_workspace_setting(self, name: str, setting: Setting) -> Setting :returns: :class:`Setting` """ + body = setting.as_dict() headers = { "Accept": "application/json", diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index fe45bd6d8..bc2d78bc4 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -55,24 +55,6 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class DeltaSharingDependency: """Represents a UC dependency.""" @@ -2563,6 +2545,7 @@ def create( :returns: :class:`ProviderInfo` """ + body = {} if authentication_type is not None: body["authentication_type"] = authentication_type.value @@ -2778,6 +2761,7 @@ def update( :returns: :class:`ProviderInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -2898,6 +2882,7 @@ def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPol :returns: :class:`FederationPolicy` """ + body = policy.as_dict() headers = { "Accept": "application/json", @@ -3047,6 +3032,7 @@ def create( :returns: :class:`RecipientInfo` """ + body = {} if authentication_type is not None: body["authentication_type"] = authentication_type.value @@ -3169,6 +3155,7 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci :returns: 
:class:`RecipientInfo` """ + body = {} if existing_token_expire_in_seconds is not None: body["existing_token_expire_in_seconds"] = existing_token_expire_in_seconds @@ -3183,8 +3170,8 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci def share_permissions( self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> GetRecipientSharePermissionsResponse: - """Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the - owner of the Recipient. + """Gets the share permissions for the specified Recipient. The caller must have the USE_RECIPIENT + privilege on the metastore or be the owner of the Recipient. :param name: str The name of the Recipient. @@ -3250,6 +3237,7 @@ def update( :returns: :class:`RecipientInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -3294,6 +3282,7 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti :returns: :class:`ShareInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -3323,8 +3312,8 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/shares/{name}", headers=headers) def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> ShareInfo: - """Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the - share. + """Gets a data object share from the metastore. The caller must have the USE_SHARE privilege on the + metastore or be the owner of the share. :param name: str The name of the share. @@ -3347,8 +3336,9 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share def list_shares( self, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ShareInfo]: - """Gets an array of data object shares from the metastore. The caller must be a metastore admin or the - owner of the share. 
There is no guarantee of a specific ordering of the elements in the array. + """Gets an array of data object shares from the metastore. If the caller has the USE_SHARE privilege on + the metastore, all shares are returned. Otherwise, only shares owned by the caller are returned. There + is no guarantee of a specific ordering of the elements in the array. :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured @@ -3387,11 +3377,11 @@ def list_shares( def share_permissions( self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> GetSharePermissionsResponse: - """Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the - owner of the share. + """Gets the permissions for a data share from the metastore. The caller must have the USE_SHARE privilege + on the metastore or be the owner of the share. :param name: str - The name of the share. + The name of the share. :param max_results: int (optional) Maximum number of permissions to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum @@ -3459,6 +3449,7 @@ def update( :returns: :class:`ShareInfo` """ + body = {} if comment is not None: body["comment"] = comment @@ -3485,11 +3476,11 @@ def update_permissions( changes: Optional[List[PermissionsChange]] = None, omit_permissions_list: Optional[bool] = None, ) -> UpdateSharePermissionsResponse: - """Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an - owner of the share. + """Updates the permissions for a data share in the metastore. The caller must have both the USE_SHARE and + SET_SHARE_PERMISSION privileges on the metastore, or be the owner of the share. - For new recipient grants, the user must also be the recipient owner or metastore admin.
recipient - revocations do not require additional privileges. + For new recipient grants, the user must also be the owner of the recipients. recipient revocations do + not require additional privileges. :param name: str The name of the share. @@ -3500,6 +3491,7 @@ def update_permissions( :returns: :class:`UpdateSharePermissionsResponse` """ + body = {} if changes is not None: body["changes"] = [v.as_dict() for v in changes] diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index afbb45083..2b839f687 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -331,6 +331,12 @@ class AlertEvaluationState(Enum): UNKNOWN = "UNKNOWN" +class AlertLifecycleState(Enum): + + ACTIVE = "ACTIVE" + DELETED = "DELETED" + + @dataclass class AlertOperandColumn: name: Optional[str] = None @@ -665,7 +671,7 @@ class AlertV2: id: Optional[str] = None """UUID identifying the alert.""" - lifecycle_state: Optional[LifecycleState] = None + lifecycle_state: Optional[AlertLifecycleState] = None """Indicates whether the query is trashed.""" owner_user_name: Optional[str] = None @@ -776,7 +782,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AlertV2: effective_run_as=_from_dict(d, "effective_run_as", AlertV2RunAs), evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), id=d.get("id", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), + lifecycle_state=_enum(d, "lifecycle_state", AlertLifecycleState), owner_user_name=d.get("owner_user_name", None), parent_path=d.get("parent_path", None), query_text=d.get("query_text", None), @@ -1132,24 +1138,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseChunkInfo: ) -@dataclass -class CancelExecutionResponse: - def as_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelExecutionResponse into a shallow dictionary of its 
immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelExecutionResponse: - """Deserializes the CancelExecutionResponse from a dictionary.""" - return cls() - - @dataclass class Channel: """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be @@ -5111,7 +5099,7 @@ class QueryMetrics: queue.""" pruned_bytes: Optional[int] = None - """Total number of bytes in all tables not read due to pruning""" + """Total number of file bytes in all tables not read due to pruning""" pruned_files_count: Optional[int] = None """Total number of files from all tables not read due to pruning""" @@ -5125,6 +5113,9 @@ class QueryMetrics: read_cache_bytes: Optional[int] = None """Size of persistent data read from the cache, in bytes.""" + read_files_bytes: Optional[int] = None + """Total number of file bytes in all tables read""" + read_files_count: Optional[int] = None """Number of files read after pruning""" @@ -5204,6 +5195,8 @@ def as_dict(self) -> dict: body["read_bytes"] = self.read_bytes if self.read_cache_bytes is not None: body["read_cache_bytes"] = self.read_cache_bytes + if self.read_files_bytes is not None: + body["read_files_bytes"] = self.read_files_bytes if self.read_files_count is not None: body["read_files_count"] = self.read_files_count if self.read_partitions_count is not None: @@ -5265,6 +5258,8 @@ def as_shallow_dict(self) -> dict: body["read_bytes"] = self.read_bytes if self.read_cache_bytes is not None: body["read_cache_bytes"] = self.read_cache_bytes + if self.read_files_bytes is not None: + body["read_files_bytes"] = self.read_files_bytes if self.read_files_count is not None: body["read_files_count"] = self.read_files_count if self.read_partitions_count is not None: @@ -5314,6 +5309,7 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryMetrics: query_compilation_start_timestamp=d.get("query_compilation_start_timestamp", None), read_bytes=d.get("read_bytes", None), 
read_cache_bytes=d.get("read_cache_bytes", None), + read_files_bytes=d.get("read_files_bytes", None), read_files_count=d.get("read_files_count", None), read_partitions_count=d.get("read_partitions_count", None), read_remote_bytes=d.get("read_remote_bytes", None), @@ -6299,6 +6295,7 @@ class TerminationReasonCode(Enum): DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DBR_IMAGE_RESOLUTION_FAILURE = "DBR_IMAGE_RESOLUTION_FAILURE" DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" @@ -7399,6 +7396,7 @@ def create( :returns: :class:`Alert` """ + body = {} if alert is not None: body["alert"] = alert.as_dict() @@ -7501,6 +7499,7 @@ def update( :returns: :class:`Alert` """ + body = {} if alert is not None: body["alert"] = alert.as_dict() @@ -7562,6 +7561,7 @@ def create( :returns: :class:`LegacyAlert` """ + body = {} if name is not None: body["name"] = name @@ -7661,6 +7661,7 @@ def update(self, alert_id: str, name: str, options: AlertOptions, query_id: str, """ + body = {} if name is not None: body["name"] = name @@ -7691,6 +7692,7 @@ def create_alert(self, alert: AlertV2) -> AlertV2: :returns: :class:`AlertV2` """ + body = alert.as_dict() headers = { "Accept": "application/json", @@ -7777,6 +7779,7 @@ def update_alert(self, id: str, alert: AlertV2, update_mask: str) -> AlertV2: :returns: :class:`AlertV2` """ + body = alert.as_dict() query = {} if update_mask is not None: @@ -7821,6 +7824,7 @@ def create( :returns: :class:`Widget` """ + body = {} if dashboard_id is not None: body["dashboard_id"] = dashboard_id @@ -7882,6 +7886,7 @@ def update( :returns: :class:`Widget` """ + body = {} if dashboard_id is not None: body["dashboard_id"] = dashboard_id @@ -8027,6 +8032,7 @@ def update( :returns: :class:`Dashboard` """ + 
body = {} if name is not None: body["name"] = name @@ -8147,6 +8153,7 @@ def set( :returns: :class:`SetResponse` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -8179,6 +8186,7 @@ def transfer_ownership( :returns: :class:`Success` """ + body = {} if new_owner is not None: body["new_owner"] = new_owner @@ -8216,6 +8224,7 @@ def create( :returns: :class:`Query` """ + body = {} if auto_resolve_display_name is not None: body["auto_resolve_display_name"] = auto_resolve_display_name @@ -8348,6 +8357,7 @@ def update( :returns: :class:`Query` """ + body = {} if auto_resolve_display_name is not None: body["auto_resolve_display_name"] = auto_resolve_display_name @@ -8427,6 +8437,7 @@ def create( :returns: :class:`LegacyQuery` """ + body = {} if data_source_id is not None: body["data_source_id"] = data_source_id @@ -8622,6 +8633,7 @@ def update( :returns: :class:`LegacyQuery` """ + body = {} if data_source_id is not None: body["data_source_id"] = data_source_id @@ -8715,6 +8727,7 @@ def create(self, *, visualization: Optional[CreateVisualizationRequestVisualizat :returns: :class:`Visualization` """ + body = {} if visualization is not None: body["visualization"] = visualization.as_dict() @@ -8760,6 +8773,7 @@ def update( :returns: :class:`Visualization` """ + body = {} if update_mask is not None: body["update_mask"] = update_mask @@ -8810,6 +8824,7 @@ def create( :returns: :class:`LegacyVisualization` """ + body = {} if description is not None: body["description"] = description @@ -8851,10 +8866,10 @@ def delete(self, id: str): def update( self, - id: str, *, created_at: Optional[str] = None, description: Optional[str] = None, + id: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[LegacyQuery] = None, @@ -8868,11 +8883,11 @@ def update( [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - :param id: str - The UUID for this 
visualization. :param created_at: str (optional) :param description: str (optional) A short description of this visualization. This is not displayed in the UI. + :param id: str (optional) + The UUID for this visualization. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. :param options: Any (optional) @@ -8885,11 +8900,14 @@ def update( :returns: :class:`LegacyVisualization` """ + body = {} if created_at is not None: body["created_at"] = created_at if description is not None: body["description"] = description + if id is not None: + body["id"] = id if name is not None: body["name"] = name if options is not None: @@ -9223,6 +9241,7 @@ def execute_statement( :returns: :class:`StatementResponse` """ + body = {} if byte_limit is not None: body["byte_limit"] = byte_limit @@ -9451,6 +9470,7 @@ def create( Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. """ + body = {} if auto_stop_mins is not None: body["auto_stop_mins"] = auto_stop_mins @@ -9620,6 +9640,7 @@ def edit( Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. 
""" + body = {} if auto_stop_mins is not None: body["auto_stop_mins"] = auto_stop_mins @@ -9806,6 +9827,7 @@ def set_permissions( :returns: :class:`WarehousePermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -9861,6 +9883,7 @@ def set_workspace_warehouse_config( """ + body = {} if channel is not None: body["channel"] = channel.as_dict() @@ -9943,6 +9966,7 @@ def update_permissions( :returns: :class:`WarehousePermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] diff --git a/databricks/sdk/service/tags.py b/databricks/sdk/service/tags.py index f643984ed..91bdc27ef 100755 --- a/databricks/sdk/service/tags.py +++ b/databricks/sdk/service/tags.py @@ -149,6 +149,7 @@ def create_tag_policy(self, tag_policy: TagPolicy) -> TagPolicy: :returns: :class:`TagPolicy` """ + body = tag_policy.as_dict() headers = { "Accept": "application/json", @@ -238,6 +239,7 @@ def update_tag_policy(self, tag_key: str, tag_policy: TagPolicy, update_mask: st :returns: :class:`TagPolicy` """ + body = tag_policy.as_dict() query = {} if update_mask is not None: diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 4b75889f1..370ce78d4 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -1429,6 +1429,7 @@ def create_endpoint( Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. 
""" + body = {} if budget_policy_id is not None: body["budget_policy_id"] = budget_policy_id @@ -1529,6 +1530,7 @@ def update_endpoint_budget_policy( :returns: :class:`PatchEndpointBudgetPolicyResponse` """ + body = {} if budget_policy_id is not None: body["budget_policy_id"] = budget_policy_id @@ -1554,6 +1556,7 @@ def update_endpoint_custom_tags( :returns: :class:`UpdateEndpointCustomTagsResponse` """ + body = {} if custom_tags is not None: body["custom_tags"] = [v.as_dict() for v in custom_tags] @@ -1606,6 +1609,7 @@ def create_index( :returns: :class:`VectorIndex` """ + body = {} if delta_sync_index_spec is not None: body["delta_sync_index_spec"] = delta_sync_index_spec.as_dict() @@ -1762,6 +1766,7 @@ def query_index( :returns: :class:`QueryVectorIndexResponse` """ + body = {} if columns is not None: body["columns"] = [v for v in columns] @@ -1804,6 +1809,7 @@ def query_next_page( :returns: :class:`QueryVectorIndexResponse` """ + body = {} if endpoint_name is not None: body["endpoint_name"] = endpoint_name @@ -1834,6 +1840,7 @@ def scan_index( :returns: :class:`ScanVectorIndexResponse` """ + body = {} if last_primary_key is not None: body["last_primary_key"] = last_primary_key @@ -1872,6 +1879,7 @@ def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertD :returns: :class:`UpsertDataVectorIndexResponse` """ + body = {} if inputs_json is not None: body["inputs_json"] = inputs_json diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 1a6f39588..7ef18fddb 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -240,24 +240,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: ) -@dataclass -class CreateScopeResponse: - def as_dict(self) -> dict: - """Serializes the CreateScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateScopeResponse into 
a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateScopeResponse: - """Deserializes the CreateScopeResponse from a dictionary.""" - return cls() - - @dataclass class CredentialInfo: credential_id: int @@ -331,24 +313,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: ) -@dataclass -class DeleteAclResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAclResponse: - """Deserializes the DeleteAclResponse from a dictionary.""" - return cls() - - @dataclass class DeleteCredentialsResponse: def as_dict(self) -> dict: @@ -403,24 +367,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteScopeResponse: - def as_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: - """Deserializes the DeleteScopeResponse from a dictionary.""" - return cls() - - @dataclass class DeleteSecretResponse: def as_dict(self) -> dict: @@ -1032,42 +978,6 @@ class ObjectType(Enum): REPO = "REPO" -@dataclass -class PutAclResponse: - def as_dict(self) -> dict: - """Serializes the PutAclResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAclResponse into a shallow dictionary of its 
immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: - """Deserializes the PutAclResponse from a dictionary.""" - return cls() - - -@dataclass -class PutSecretResponse: - def as_dict(self) -> dict: - """Serializes the PutSecretResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutSecretResponse: - """Deserializes the PutSecretResponse from a dictionary.""" - return cls() - - @dataclass class RepoAccessControlRequest: group_name: Optional[str] = None @@ -1840,6 +1750,7 @@ def create( :returns: :class:`CreateCredentialsResponse` """ + body = {} if git_email is not None: body["git_email"] = git_email @@ -1948,6 +1859,7 @@ def update( """ + body = {} if git_email is not None: body["git_email"] = git_email @@ -2004,6 +1916,7 @@ def create( :returns: :class:`CreateRepoResponse` """ + body = {} if path is not None: body["path"] = path @@ -2129,6 +2042,7 @@ def set_permissions( :returns: :class:`RepoPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -2165,6 +2079,7 @@ def update( """ + body = {} if branch is not None: body["branch"] = branch @@ -2190,6 +2105,7 @@ def update_permissions( :returns: :class:`RepoPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -2268,6 +2184,7 @@ def create_scope( """ + body = {} if backend_azure_keyvault is not None: body["backend_azure_keyvault"] = backend_azure_keyvault.as_dict() @@ -2305,6 +2222,7 @@ def delete_acl(self, scope: str, principal: str): """ + body = {} if principal is not None: body["principal"] = principal @@ 
-2334,6 +2252,7 @@ def delete_scope(self, scope: str): """ + body = {} if scope is not None: body["scope"] = scope @@ -2365,6 +2284,7 @@ def delete_secret(self, scope: str, key: str): """ + body = {} if key is not None: body["key"] = key @@ -2586,6 +2506,7 @@ def put_acl(self, scope: str, principal: str, permission: AclPermission): """ + body = {} if permission is not None: body["permission"] = permission.value @@ -2637,6 +2558,7 @@ def put_secret( """ + body = {} if bytes_value is not None: body["bytes_value"] = bytes_value @@ -2679,6 +2601,7 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): """ + body = {} if path is not None: body["path"] = path @@ -2807,7 +2730,7 @@ def import_( If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the - `language` field. + `language` field. Zip files within directories are not supported. :param path: str The absolute path of the object or directory. 
Importing a directory is only supported for the `DBC` @@ -2836,6 +2759,7 @@ def import_( """ + body = {} if content is not None: body["content"] = content @@ -2893,6 +2817,7 @@ def mkdirs(self, path: str): """ + body = {} if path is not None: body["path"] = path @@ -2922,6 +2847,7 @@ def set_permissions( :returns: :class:`WorkspaceObjectPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] @@ -2953,6 +2879,7 @@ def update_permissions( :returns: :class:`WorkspaceObjectPermissions` """ + body = {} if access_control_list is not None: body["access_control_list"] = [v.as_dict() for v in access_control_list] diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 133b16f3d..2a8043172 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -43,9 +43,9 @@ a = AccountClient() - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - all = a.workspace_assignment.list(list=workspace_id) + all = a.workspace_assignment.list(workspace_id=workspace_id) Get the permission assignments for the specified Databricks account and Databricks workspace. 
@@ -74,9 +74,9 @@ spn_id = spn.id - workspace_id = os.environ["DUMMY_WORKSPACE_ID"] + workspace_id = os.environ["TEST_WORKSPACE_ID"] - _ = a.workspace_assignment.update( + a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index d63648d58..b71c1707e 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,15 +24,15 @@ a = AccountClient() - role = a.credentials.create( + creds = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=role.credentials_id) + a.credentials.delete(credentials_id=creds.credentials_id) Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index b9f080e36..41a04deb3 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,7 +16,6 @@ .. 
code-block:: - import os import time from databricks.sdk import AccountClient @@ -24,13 +23,13 @@ a = AccountClient() - storage = a.storage.create( + bucket = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), ) # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) Creates a Databricks storage configuration for an account. diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index a79286baf..320c875e1 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -135,11 +135,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: READ_VOLUME :value: "READ_VOLUME" + .. py:attribute:: SELECT + :value: "SELECT" + .. py:attribute:: WRITE_VOLUME :value: "WRITE_VOLUME" .. py:class:: AppManifestAppResourceUcSecurableSpecUcSecurableType + .. py:attribute:: TABLE + :value: "TABLE" + .. py:attribute:: VOLUME :value: "VOLUME" diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index a38b2353f..44209d4b9 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -279,7 +279,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConnectionType - Next Id: 46 + Next Id: 47 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -577,10 +577,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteAliasResponse - :members: - :undoc-members: - .. autoclass:: DeleteCredentialResponse :members: :undoc-members: @@ -1504,7 +1500,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:class:: SecurableKind - Latest kind: CONNECTION_SALESFORCE_OAUTH_MTLS = 268; Next id:269 + Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271 .. py:attribute:: TABLE_DB_STORAGE :value: "TABLE_DB_STORAGE" diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 23312c5d4..d5871432c 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -453,6 +453,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DBFS_DOWN :value: "DBFS_DOWN" + .. py:attribute:: DECOMMISSION_ENDED + :value: "DECOMMISSION_ENDED" + + .. py:attribute:: DECOMMISSION_STARTED + :value: "DECOMMISSION_STARTED" + .. py:attribute:: DID_NOT_EXPAND_DISK :value: "DID_NOT_EXPAND_DISK" @@ -1186,6 +1192,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DBFS_COMPONENT_UNHEALTHY :value: "DBFS_COMPONENT_UNHEALTHY" + .. py:attribute:: DBR_IMAGE_RESOLUTION_FAILURE + :value: "DBR_IMAGE_RESOLUTION_FAILURE" + .. py:attribute:: DISASTER_RECOVERY_REPLICATION :value: "DISASTER_RECOVERY_REPLICATION" diff --git a/docs/dbdataclasses/files.rst b/docs/dbdataclasses/files.rst index 42be15a7b..b6749a8c4 100644 --- a/docs/dbdataclasses/files.rst +++ b/docs/dbdataclasses/files.rst @@ -12,18 +12,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateDirectoryResponse - :members: - :undoc-members: - .. autoclass:: CreateResponse :members: :undoc-members: -.. autoclass:: DeleteDirectoryResponse - :members: - :undoc-members: - .. autoclass:: DeleteResponse :members: :undoc-members: @@ -40,10 +32,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetDirectoryMetadataResponse - :members: - :undoc-members: - .. 
autoclass:: GetMetadataResponse :members: :undoc-members: @@ -71,7 +59,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: ReadResponse :members: :undoc-members: - -.. autoclass:: UploadResponse - :members: - :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index 142ec986d..ca240158d 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -40,10 +40,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: DeleteWorkspacePermissionAssignmentResponse :members: :undoc-members: @@ -174,10 +170,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPLACE :value: "REPLACE" -.. autoclass:: PatchResponse - :members: - :undoc-members: - .. py:class:: PatchSchema .. py:attribute:: URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index c56480399..04a47acf2 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -20,14 +20,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelAllRunsResponse - :members: - :undoc-members: - -.. autoclass:: CancelRunResponse - :members: - :undoc-members: - .. py:class:: CleanRoomTaskRunLifeCycleState Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to remove coupling with jobs API definition @@ -242,14 +234,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - -.. autoclass:: DeleteRunResponse - :members: - :undoc-members: - .. 
autoclass:: EnforcePolicyComplianceForJobResponseJobClusterSettingsChange :members: :undoc-members: @@ -608,10 +592,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ResetResponse - :members: - :undoc-members: - .. autoclass:: ResolvedConditionTaskValues :members: :undoc-members: @@ -1128,10 +1108,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE :value: "TABLE" -.. autoclass:: UpdateResponse - :members: - :undoc-members: - .. autoclass:: ViewItem :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 91c4ce5b3..1d8fba9ef 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -94,6 +94,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ContinuousWindow + :members: + :undoc-members: + .. autoclass:: CreateCommentResponse :members: :undoc-members: @@ -868,6 +872,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SlidingWindow + :members: + :undoc-members: + .. py:class:: Status The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. @@ -899,6 +907,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: TumblingWindow + :members: + :undoc-members: + .. autoclass:: UpdateCommentResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index 00c961155..cdb9f649b 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -24,10 +24,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. 
autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: FederationPolicy :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index db6deb399..4fc02060b 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -149,10 +149,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: EmailNotifications :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 0f4a7b3d3..b47a84770 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -279,10 +279,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: DeleteRestrictWorkspaceAdminsSettingResponse :members: :undoc-members: @@ -708,10 +704,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ReplaceResponse - :members: - :undoc-members: - .. autoclass:: RestrictWorkspaceAdminsMessage :members: :undoc-members: @@ -732,10 +724,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SetStatusResponse - :members: - :undoc-members: - .. autoclass:: SlackConfig :members: :undoc-members: @@ -802,10 +790,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateResponse - :members: - :undoc-members: - .. 
autoclass:: WorkspaceNetworkOption :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index 43a6675e0..a10c32072 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -90,10 +90,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VARIANT :value: "VARIANT" -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: DeltaSharingDependency :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 98cbd832f..865aba6c5 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -66,6 +66,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UNKNOWN :value: "UNKNOWN" +.. py:class:: AlertLifecycleState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DELETED + :value: "DELETED" + .. autoclass:: AlertOperandColumn :members: :undoc-members: @@ -165,10 +173,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelExecutionResponse - :members: - :undoc-members: - .. autoclass:: Channel :members: :undoc-members: @@ -1271,6 +1275,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DBFS_COMPONENT_UNHEALTHY :value: "DBFS_COMPONENT_UNHEALTHY" + .. py:attribute:: DBR_IMAGE_RESOLUTION_FAILURE + :value: "DBR_IMAGE_RESOLUTION_FAILURE" + .. py:attribute:: DISASTER_RECOVERY_REPLICATION :value: "DISASTER_RECOVERY_REPLICATION" diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index db35f519d..4aebf92c5 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -33,18 +33,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. 
autoclass:: CreateScopeResponse - :members: - :undoc-members: - .. autoclass:: CredentialInfo :members: :undoc-members: -.. autoclass:: DeleteAclResponse - :members: - :undoc-members: - .. autoclass:: DeleteCredentialsResponse :members: :undoc-members: @@ -57,10 +49,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteScopeResponse - :members: - :undoc-members: - .. autoclass:: DeleteSecretResponse :members: :undoc-members: @@ -213,14 +201,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPO :value: "REPO" -.. autoclass:: PutAclResponse - :members: - :undoc-members: - -.. autoclass:: PutSecretResponse - :members: - :undoc-members: - .. autoclass:: RepoAccessControlRequest :members: :undoc-members: diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 47cff9e95..17297d8dd 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -24,10 +24,10 @@ w = WorkspaceClient() - created = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created.name, force=True) + w.catalogs.delete(name=created_catalog.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. @@ -123,8 +123,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. 
:param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index 8051478d3..acfeecd53 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -140,8 +140,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param max_results: int (optional) Maximum number of connections to return. - If not set, all connections are returned (not diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst index ed67f89ac..88a4ace38 100644 --- a/docs/workspace/catalog/credentials.rst +++ b/docs/workspace/catalog/credentials.rst @@ -90,6 +90,10 @@ is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param include_unbound: bool (optional) Whether to include credentials not bound to the workspace. Effective only if the user has permission to update the credential–workspace binding. 
diff --git a/docs/workspace/catalog/entity_tag_assignments.rst b/docs/workspace/catalog/entity_tag_assignments.rst index fcba2bbae..f1111bffe 100644 --- a/docs/workspace/catalog/entity_tag_assignments.rst +++ b/docs/workspace/catalog/entity_tag_assignments.rst @@ -70,6 +70,10 @@ List tag assignments for an Unity Catalog entity + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param entity_type: str The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, tables, columns, volumes. diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index b8b70227f..612800956 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,22 +30,20 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) - external_location = w.external_locations.create( + created = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=storage_credential.name, - comment="created via SDK", - url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", + credential_name=credential.name, + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) - w.external_locations.delete(name=external_location.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -107,20 +105,20 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) - _ = w.external_locations.get(get=created.name) + _ = w.external_locations.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. @@ -142,11 +140,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.external_locations.list(catalog.ListExternalLocationsRequest()) + all = w.external_locations.list() Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on @@ -157,8 +154,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. 
+ absent, which is the only indication that the end of results has been reached. :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst index ff2d0ad14..8c539641d 100644 --- a/docs/workspace/catalog/functions.rst +++ b/docs/workspace/catalog/functions.rst @@ -75,8 +75,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Name of parent catalog for functions of interest. diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst index 64c1b3484..69f2dd6c5 100644 --- a/docs/workspace/catalog/grants.rst +++ b/docs/workspace/catalog/grants.rst @@ -62,6 +62,13 @@ Gets the permissions for a securable. Does not include inherited permissions. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. + + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. + :param securable_type: str Type of securable. :param full_name: str @@ -133,6 +140,13 @@ Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. 
+ + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. + :param securable_type: str Type of securable. :param full_name: str diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst index a6db85f60..f685976f4 100644 --- a/docs/workspace/catalog/metastores.rst +++ b/docs/workspace/catalog/metastores.rst @@ -179,8 +179,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param max_results: int (optional) Maximum number of metastores to return. - when set to a value greater than 0, the page length is the diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 18aed8bb4..f7432e1e5 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -81,6 +81,10 @@ There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. 
+ :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) diff --git a/docs/workspace/catalog/policies.rst b/docs/workspace/catalog/policies.rst index 2eb9e6a99..1855013e7 100644 --- a/docs/workspace/catalog/policies.rst +++ b/docs/workspace/catalog/policies.rst @@ -54,6 +54,10 @@ List all policies defined on a securable. Optionally, the list can include inherited policies defined on the securable's parent schema or catalog. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param on_securable_type: str Required. The type of the securable to list policies for. :param on_securable_fullname: str diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index 947502240..9628d7942 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -136,6 +136,10 @@ There is no guarantee of a specific ordering of the elements in the response. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst index dc7df22ac..ae293fc11 100644 --- a/docs/workspace/catalog/resource_quotas.rst +++ b/docs/workspace/catalog/resource_quotas.rst @@ -33,6 +33,10 @@ ListQuotas returns all quota values under the metastore. 
There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index f8d676c84..719d5a156 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -125,8 +125,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Parent catalog for schemas of interest. diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 928ad39e5..d8111141e 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,13 +30,13 @@ w = WorkspaceClient() - created = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) Creates a new storage credential. 
@@ -98,13 +98,13 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) - by_name = w.storage_credentials.get(name=created.name) + by_name = w.storage_credentials.get(get=created.name) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. @@ -123,10 +123,11 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list() + all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore @@ -138,8 +139,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param include_unbound: bool (optional) Whether to include credentials not bound to the workspace. 
Effective only if the user has permission diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index b9a1c5778..ef6c8f30f 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -45,8 +45,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param metastore_id: str The ID for the metastore in which the system schema resides. diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index c5c3a131d..b33bef940 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -173,8 +173,7 @@ PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while still providing a next_page_token. Clients must continue reading pages until next_page_token is - absent, which is the only indication that the end of results has been reached. This behavior follows - Google AIP-158 guidelines. + absent, which is the only indication that the end of results has been reached. :param catalog_name: str Name of parent catalog for tables of interest. @@ -237,6 +236,10 @@ There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str Name of parent catalog for tables of interest. 
:param include_manifest_capabilities: bool (optional) diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst index bb2f890c2..5566e3e8f 100644 --- a/docs/workspace/catalog/volumes.rst +++ b/docs/workspace/catalog/volumes.rst @@ -141,6 +141,10 @@ There is no guarantee of a specific ordering of the elements in the array. + PAGINATION BEHAVIOR: The API is by default paginated, a page may contain zero results while still + providing a next_page_token. Clients must continue reading pages until next_page_token is absent, + which is the only indication that the end of results has been reached. + :param catalog_name: str The identifier of the catalog :param schema_name: str diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index bda071eb8..eb76a4c37 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -53,6 +53,13 @@ Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. + NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated calls + will be deprecated soon. + + PAGINATION BEHAVIOR: When using pagination (max_results >= 0), a page may contain zero results while + still providing a next_page_token. Clients must continue reading pages until next_page_token is + absent, which is the only indication that the end of results has been reached. + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index d46b8ecd0..db78626ff 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -647,11 +647,10 @@ .. 
code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst index 3d01566c6..8a8c60ae3 100644 --- a/docs/workspace/files/files.rst +++ b/docs/workspace/files/files.rst @@ -148,14 +148,14 @@ :returns: Iterator over :class:`DirectoryEntry` - .. py:method:: upload(file_path: str, content: BinaryIO [, overwrite: Optional[bool], part_size: Optional[int], use_parallel: bool = True, parallelism: Optional[int]]) -> UploadStreamResult + .. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool], part_size: Optional[int], use_parallel: bool = True, parallelism: Optional[int]]) -> UploadStreamResult Upload a file with stream interface. :param file_path: str The absolute remote path of the target file, e.g. /Volumes/path/to/your/file - :param content: BinaryIO + :param contents: BinaryIO The contents of the file to upload. This must be a BinaryIO stream. :param overwrite: bool (optional) If true, an existing file will be overwritten. When not specified, assumed True. diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst index 2f95213e2..b2390ce63 100644 --- a/docs/workspace/iam/current_user.rst +++ b/docs/workspace/iam/current_user.rst @@ -17,7 +17,7 @@ w = WorkspaceClient() - me2 = w.current_user.me() + me = w.current_user.me() Get details about the current method caller's identity. 
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 15524c53e..ea24afd1a 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -44,7 +44,7 @@ obj = w.workspace.get_status(path=notebook_path) - levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) + _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 39beecc1b..0b82986de 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -357,21 +357,23 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - run = w.jobs.submit( - run_name=f"sdk-{time.time_ns()}", + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", tasks=[ - jobs.SubmitTask( + jobs.Task( + description="test", existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key=f"sdk-{time.time_ns()}", + task_key="test", + timeout_seconds=0, ) ], - ).result() + ) - output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) + by_id = w.jobs.get(job_id=created_job.job_id) # cleanup - w.jobs.delete_run(run_id=run.run_id) + w.jobs.delete(job_id=created_job.job_id) Get a single job. diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 2d34256e4..98d803a63 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -90,7 +90,7 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") Creates a new registered model with the name specified in the request body. 
Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. @@ -120,7 +120,7 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a model version. @@ -734,14 +734,13 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + model = w.model_registry.get_model(name=created.registered_model.name) - w.model_registry.update_model_version( + w.model_registry.update_model( + name=model.registered_model_databricks.name, description=f"sdk-{time.time_ns()}", - name=created.model_version.name, - version=created.model_version.version, ) Updates a registered model. diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 7e0ae7b5f..502061df3 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger], usage_policy_id: Optional[str]]) -> CreatePipelineResponse Usage: @@ -120,6 +120,8 @@ for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. :returns: :class:`CreatePipelineResponse` @@ -380,7 +382,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger], usage_policy_id: Optional[str]]) Usage: @@ -504,6 +506,8 @@ for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. + :param usage_policy_id: str (optional) + Usage policy of this pipeline. diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index fd81e1b24..1a7c88de9 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -101,12 +101,25 @@ .. 
code-block:: + import time + from databricks.sdk import WorkspaceClient - from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.providers.list(sharing.ListProvidersRequest()) + public_share_recipient = """{ + "shareCredentialsVersion":1, + "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz", + "endpoint":"https://sharing.delta.io/delta-sharing/" + } + """ + + created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient) + + shares = w.providers.list_shares(name=created.name) + + # cleanup + w.providers.delete(name=created.name) Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index 2f921319c..9a99049c5 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -194,8 +194,8 @@ # cleanup w.recipients.delete(name=created.name) - Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the - owner of the Recipient. + Gets the share permissions for the specified Recipient. The caller must have the USE_RECIPIENT + privilege on the metastore or be the owner of the Recipient. :param name: str The name of the Recipient. diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index d749bf458..80c5d11b6 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -70,8 +70,8 @@ # cleanup w.shares.delete(name=created_share.name) - Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the - share. + Gets a data object share from the metastore. The caller must have the USE_SHARE privilege on the + metastore or be the owner of the share. :param name: str The name of the share. 
@@ -95,8 +95,9 @@ all = w.shares.list(sharing.ListSharesRequest()) - Gets an array of data object shares from the metastore. The caller must be a metastore admin or the - owner of the share. There is no guarantee of a specific ordering of the elements in the array. + Gets an array of data object shares from the metastore. If the caller has the USE_SHARE privilege on + the metastore, all shares are returned. Otherwise, only shares owned by the caller are returned. There + is no guarantee of a specific ordering of the elements in the array. :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured @@ -114,11 +115,11 @@ .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetSharePermissionsResponse - Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the - owner of the share. + Gets the permissions for a data share from the metastore. The caller must have the USE_SHARE privilege + on the metastore or be the owner of the share. :param name: str - The name of the share. + The name of the share. :param max_results: int (optional) Maximum number of permissions to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum @@ -219,11 +220,11 @@ .. py:method:: update_permissions(name: str [, changes: Optional[List[PermissionsChange]], omit_permissions_list: Optional[bool]]) -> UpdateSharePermissionsResponse - Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an - owner of the share. + Updates the permissions for a data share in the metastore. The caller must have both the USE_SHARE and + SET_SHARE_PERMISSION privileges on the metastore, or be the owner of the share.
- For new recipient grants, the user must also be the recipient owner or metastore admin. recipient - revocations do not require additional privileges. + For new recipient grants, the user must also be the owner of the recipients. recipient revocations do + not require additional privileges. :param name: str The name of the share. diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index f0081b3f2..0dfb63fbf 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -29,7 +29,7 @@ display_name=f"sdk-{time.time_ns()}", warehouse_id=srcs[0].warehouse_id, description="test query from Go SDK", - query_text="SHOW TABLES", + query_text="SELECT 1", ) ) diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst index 56ebe9dfa..e3358c3e2 100644 --- a/docs/workspace/sql/query_visualizations_legacy.rst +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -51,7 +51,7 @@ - .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization + .. py:method:: update( [, created_at: Optional[str], description: Optional[str], id: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization Updates visualization in the query. @@ -60,11 +60,11 @@ [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - :param id: str - The UUID for this visualization. :param created_at: str (optional) :param description: str (optional) A short description of this visualization. This is not displayed in the UI. + :param id: str (optional) + The UUID for this visualization. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. 
:param options: Any (optional) diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 4fba581e8..e1b7d12b9 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -178,7 +178,7 @@ content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, language=workspace.Language.SQL, - overwrite=True, + overwrite=True, path=notebook_path, ) @@ -186,7 +186,7 @@ If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the - `language` field. + `language` field. Zip files within directories are not supported. :param path: str The absolute path of the object or directory. Importing a directory is only supported for the `DBC` diff --git a/tests/databricks/sdk/service/httpcallv2.py b/tests/databricks/sdk/service/httpcallv2.py index 60693ac41..d0d381ac7 100755 --- a/tests/databricks/sdk/service/httpcallv2.py +++ b/tests/databricks/sdk/service/httpcallv2.py @@ -120,6 +120,7 @@ def create_resource( :returns: :class:`Resource` """ + body = {} if body_field is not None: body["body_field"] = body_field @@ -215,6 +216,7 @@ def update_resource( :returns: :class:`Resource` """ + body = resource.as_dict() query = {} if field_mask is not None: diff --git a/tests/databricks/sdk/service/idempotencytesting.py b/tests/databricks/sdk/service/idempotencytesting.py index 550795aa2..672efc09c 100755 --- a/tests/databricks/sdk/service/idempotencytesting.py +++ b/tests/databricks/sdk/service/idempotencytesting.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +import uuid from dataclasses import dataclass from typing import Any, Dict, Optional @@ -50,6 +51,8 @@ def __init__(self, api_client): def
create_test_resource(self, test_resource: TestResource, *, request_id: Optional[str] = None) -> TestResource: + if request_id is None or request_id == "": + request_id = str(uuid.uuid4()) body = test_resource.as_dict() query = {} if request_id is not None: diff --git a/tests/databricks/sdk/service/lrotesting.py b/tests/databricks/sdk/service/lrotesting.py index 60d70bf3d..679118220 100755 --- a/tests/databricks/sdk/service/lrotesting.py +++ b/tests/databricks/sdk/service/lrotesting.py @@ -325,6 +325,7 @@ def create_test_resource(self, resource: TestResource) -> CreateTestResourceOper :returns: :class:`Operation` """ + body = resource.as_dict() headers = { "Accept": "application/json", diff --git a/tests/generated/test_idempotency.py b/tests/generated/test_idempotency.py new file mode 100755 index 000000000..07308c4d3 --- /dev/null +++ b/tests/generated/test_idempotency.py @@ -0,0 +1,134 @@ +# Code generated by Databricks SDK Generator. DO NOT EDIT. + +import pytest + +from databricks.sdk.core import ApiClient +from tests.databricks.sdk.service.idempotencytesting import ( + IdempotencyTestingAPI, TestResource) + + +@pytest.mark.parametrize( + "setup_mocks,make_call,want_result,expected_request_id", + [ + pytest.param( + lambda requests_mock: requests_mock.post( + "http://localhost/api/2.0/idempotency-testing/resources", + [ + { + "status_code": 503, + "json": {"error_code": "TEMPORARILY_UNAVAILABLE", "message": "Service temporarily unavailable"}, + }, + {"status_code": 200, "json": {"id": "test-resource-123", "name": "test-resource"}}, + ], + ), + lambda client: client.create_test_resource( + request_id="test-request-id-12345", + test_resource=TestResource( + id="test-resource-123", + name="test-resource", + ), + ), + TestResource( + id="test-resource-123", + name="test-resource", + ), + "test-request-id-12345", + id="RetryWithProvidedRequestID", + ), + ], +) +def test_idempotency_retry_with_same_request_id( + config, requests_mock, setup_mocks, make_call, 
want_result, expected_request_id +): + """Test that retries on 503 use the same request_id for idempotency""" + setup_mocks(requests_mock) + + api_client = ApiClient(config) + c = IdempotencyTestingAPI(api_client) + + # Make the call - should retry automatically on 503 + result = make_call(c) + + # Verify the result + assert result == want_result + + # Verify two requests were made + assert requests_mock.call_count == 2, f"Expected 2 calls (original + retry), got {requests_mock.call_count}" + + # Verify both requests used the expected request_id for idempotency + first_request_id = requests_mock.request_history[0].qs.get("request_id", [None])[0] + second_request_id = requests_mock.request_history[1].qs.get("request_id", [None])[0] + + assert ( + first_request_id == expected_request_id + ), f"First request should use provided request_id, got: {first_request_id}" + assert ( + second_request_id == expected_request_id + ), f"Retry should reuse same request_id for idempotency, got: {second_request_id}" + + +@pytest.mark.parametrize( + "make_call,want_result,fixtures", + [ + pytest.param( + lambda client: client.create_test_resource( + test_resource=TestResource( + id="test-resource-123", + name="test-resource", + ) + ), + TestResource( + id="test-resource-123", + name="test-resource", + ), + [ + { + "status_code": 503, + "json": {"error_code": "TEMPORARILY_UNAVAILABLE", "message": "Service temporarily unavailable"}, + }, + {"status_code": 200, "json": {"id": "test-resource-123", "name": "test-resource"}}, + ], + id="RetryWithAutoGeneratedRequestID", + ), + ], +) +def test_idempotency_auto_generated_request_id(config, requests_mock, make_call, want_result, fixtures): + """Test that SDK auto-generates request_id and reuses it on retry""" + captured_request_ids = [] + + def mock_handler(request, context): + request_id = request.qs.get("request_id", [None])[0] + captured_request_ids.append(request_id) + + if len(captured_request_ids) == 1: + # First call: capture request_id 
and return first fixture + context.status_code = fixtures[0]["status_code"] + return fixtures[0]["json"] + elif len(captured_request_ids) == 2: + # Second call: validate same request_id and return second fixture + context.status_code = fixtures[1]["status_code"] + return fixtures[1]["json"] + else: + context.status_code = 500 + return {"error": "Unexpected call"} + + requests_mock.post("http://localhost/api/2.0/idempotency-testing/resources", json=mock_handler) + + api_client = ApiClient(config) + c = IdempotencyTestingAPI(api_client) + + result = make_call(c) + + # Verify the result + assert result == want_result + + # Verify exactly 2 calls were made (initial + retry) + assert requests_mock.call_count == 2, f"Expected 2 calls, got {requests_mock.call_count}" + + # Verify SDK auto-generated request_id + assert len(captured_request_ids) == 2, f"Expected 2 captured request IDs, got {len(captured_request_ids)}" + assert captured_request_ids[0] is not None, "First request should have auto-generated request_id" + assert captured_request_ids[1] is not None, "Retry request should have auto-generated request_id" + assert ( + captured_request_ids[0] == captured_request_ids[1] + ), f"Retry should use same auto-generated request_id. Got: {captured_request_ids}"