diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a68714d08..9834ec958 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -3a7fe4deb693ca98d89b044116aaf008efd895a5 \ No newline at end of file +3ae6f76120079424c8654263eafbc30ec0551854 \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 2fdb85ae0..cb2d6d306 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,14 @@ ### Internal Changes ### API Changes +* Added [w.policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/policies.html) workspace-level service and [w.temporary_path_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/temporary_path_credentials.html) workspace-level service. +* Added `create()` method for [w.tables](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/tables.html) workspace-level service. +* Added `list_database_catalogs()`, `list_synced_database_tables()`, `update_database_catalog()` and `update_synced_database_table()` methods for [w.database](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/database/database.html) workspace-level service. +* Added `first_on_demand` field for `databricks.sdk.service.compute.GcpAttributes`. +* Added `usage_policy_id` field for `databricks.sdk.service.jobs.CreateJob`. +* Added `usage_policy_id` field for `databricks.sdk.service.jobs.JobSettings`. +* Added `usage_policy_id` field for `databricks.sdk.service.jobs.SubmitRun`. +* Added `client_request_id` and `usage_context` fields for `databricks.sdk.service.serving.QueryEndpointInput`. +* Added `channel_id`, `channel_id_set`, `oauth_token` and `oauth_token_set` fields for `databricks.sdk.service.settings.SlackConfig`. +* Added `snapshot` enum value for `databricks.sdk.service.ml.PublishSpecPublishMode`. +* [Breaking] Changed `publish_mode` field for `databricks.sdk.service.ml.PublishSpec` to be required. 
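The entries above introduce, among other things, the new `w.policies` and `w.temporary_path_credentials` workspace services; a minimal, hypothetical sketch of exercising them (the schema name, storage URL, and printed fields are illustrative assumptions, not part of this changeset):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# List ABAC policies on a placeholder schema, including inherited policies.
for policy in w.policies.list_policies(
    on_securable_type="schema",
    on_securable_fullname="main.default",   # placeholder securable
    include_inherited=True,
):
    print(policy.name, policy.policy_type)

# Request a short-lived, read-only credential for a placeholder external path.
cred = w.temporary_path_credentials.generate_temporary_path_credentials(
    url="s3://my-bucket/landing/",          # placeholder external location path
    operation=catalog.PathOperation.PATH_READ,
)
print(cred.expiration_time)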
\ No newline at end of file diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 7c0bd489c..288762efe 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -51,12 +51,13 @@ ExternalMetadataAPI, FunctionsAPI, GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, - QualityMonitorsAPI, + PoliciesAPI, QualityMonitorsAPI, RegisteredModelsAPI, ResourceQuotasAPI, SchemasAPI, StorageCredentialsAPI, SystemSchemasAPI, TableConstraintsAPI, TablesAPI, + TemporaryPathCredentialsAPI, TemporaryTableCredentialsAPI, VolumesAPI, WorkspaceBindingsAPI) from databricks.sdk.service.cleanrooms import (CleanRoomAssetRevisionsAPI, @@ -300,6 +301,7 @@ def __init__( self._permission_migration = pkg_iam.PermissionMigrationAPI(self._api_client) self._permissions = pkg_iam.PermissionsAPI(self._api_client) self._pipelines = pkg_pipelines.PipelinesAPI(self._api_client) + self._policies = pkg_catalog.PoliciesAPI(self._api_client) self._policy_compliance_for_clusters = pkg_compute.PolicyComplianceForClustersAPI(self._api_client) self._policy_compliance_for_jobs = pkg_jobs.PolicyComplianceForJobsAPI(self._api_client) self._policy_families = pkg_compute.PolicyFamiliesAPI(self._api_client) @@ -345,6 +347,7 @@ def __init__( self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client) self._table_constraints = pkg_catalog.TableConstraintsAPI(self._api_client) self._tables = pkg_catalog.TablesAPI(self._api_client) + self._temporary_path_credentials = pkg_catalog.TemporaryPathCredentialsAPI(self._api_client) self._temporary_table_credentials = pkg_catalog.TemporaryTableCredentialsAPI(self._api_client) self._token_management = pkg_settings.TokenManagementAPI(self._api_client) self._tokens = pkg_settings.TokensAPI(self._api_client) @@ -670,6 +673,11 @@ def pipelines(self) -> pkg_pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines + @property + def policies(self) -> pkg_catalog.PoliciesAPI: + """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies in Unity Catalog.""" + return self._policies + @property def policy_compliance_for_clusters(self) -> pkg_compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" @@ -860,9 +868,14 @@ def tables(self) -> pkg_catalog.TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables + @property + def temporary_path_credentials(self) -> pkg_catalog.TemporaryPathCredentialsAPI: + """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud storage locations registered in Databricks.""" + return self._temporary_path_credentials + @property def temporary_table_credentials(self) -> pkg_catalog.TemporaryTableCredentialsAPI: - """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.""" + """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks.""" return self._temporary_table_credentials @property diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 797b353de..22caa3809 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -32,8 +32,6 @@ 
class App: app_status: Optional[ApplicationStatus] = None budget_policy_id: Optional[str] = None - """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the - new usage_policy_id is properly populated and used.""" compute_status: Optional[ComputeStatus] = None @@ -51,8 +49,6 @@ class App: """The description of the app.""" effective_budget_policy_id: Optional[str] = None - """TODO: Deprecate this field after serverless entitlements are released to all prod stages and the - new usage_policy_id is properly populated and used.""" effective_user_api_scopes: Optional[List[str]] = None """The effective api scopes granted to the user access token.""" diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 10dc41468..255fa6fa5 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1138,6 +1138,55 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnMask: return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None)) +@dataclass +class ColumnMaskOptions: + function_name: str + """The fully qualified name of the column mask function. The function is called on each row of the + target table. The function's first argument and its return type should match the type of the + masked column. Required on create and update.""" + + on_column: str + """The alias of the column to be masked. The alias must refer to one of matched columns. The values + of the column is passed to the column mask function as the first argument. Required on create + and update.""" + + using: Optional[List[FunctionArgument]] = None + """Optional list of column aliases or constant literals to be passed as additional arguments to the + column mask function. 
The type of each column should match the positional argument of the column + mask function.""" + + def as_dict(self) -> dict: + """Serializes the ColumnMaskOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.on_column is not None: + body["on_column"] = self.on_column + if self.using: + body["using"] = [v.as_dict() for v in self.using] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ColumnMaskOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.on_column is not None: + body["on_column"] = self.on_column + if self.using: + body["using"] = self.using + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ColumnMaskOptions: + """Deserializes the ColumnMaskOptions from a dictionary.""" + return cls( + function_name=d.get("function_name", None), + on_column=d.get("on_column", None), + using=_repeated_dict(d, "using", FunctionArgument), + ) + + @dataclass class ColumnRelationship: source: Optional[str] = None @@ -2378,6 +2427,24 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteMonitorResponse: return cls() +@dataclass +class DeletePolicyResponse: + def as_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: + """Deserializes the DeletePolicyResponse from a dictionary.""" + return cls() + + @dataclass class DeleteRequestExternalLineage: source: ExternalLineageObject @@ -3830,6 +3897,38 @@ def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: ) +@dataclass +class FunctionArgument: + alias: Optional[str] = None + """The alias of a matched column.""" + + constant: Optional[str] = None + """A constant literal.""" + + def as_dict(self) -> dict: + """Serializes the FunctionArgument into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.constant is not None: + body["constant"] = self.constant + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FunctionArgument into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.constant is not None: + body["constant"] = self.constant + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FunctionArgument: + """Deserializes the FunctionArgument from a dictionary.""" + return cls(alias=d.get("alias", None), constant=d.get("constant", None)) + + @dataclass class FunctionDependency: """A function that is dependent on a SQL object.""" @@ -4362,6 +4461,77 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: ) +@dataclass +class GenerateTemporaryPathCredentialResponse: + aws_temp_credentials: Optional[AwsCredentials] = None + + azure_aad: Optional[AzureActiveDirectoryToken] = None + + azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None + + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. 
The API client is advised to + cache the credential given this expiration time.""" + + gcp_oauth_token: Optional[GcpOauthToken] = None + + r2_temp_credentials: Optional[R2Credentials] = None + + url: Optional[str] = None + """The URL of the storage path accessible by the temporary credential.""" + + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryPathCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryPathCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryPathCredentialResponse: + """Deserializes the GenerateTemporaryPathCredentialResponse from a dictionary.""" + return cls( + aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), + azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), + r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), + url=d.get("url", None), + ) + + @dataclass class GenerateTemporaryServiceCredentialAzureOptions: """The Azure cloud options to customize the requested temporary credential""" @@ -5162,6 +5332,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: ) +@dataclass +class ListPoliciesResponse: + next_page_token: Optional[str] = None + """Optional opaque token for continuing pagination. 
`page_token` should be set to this value for + the next request to retrieve the next page of results.""" + + policies: Optional[List[PolicyInfo]] = None + """The list of retrieved policies.""" + + def as_dict(self) -> dict: + """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse: + """Deserializes the ListPoliciesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", PolicyInfo)) + + @dataclass class ListQuotasResponse: next_page_token: Optional[str] = None @@ -5432,6 +5635,38 @@ def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) +@dataclass +class MatchColumn: + alias: Optional[str] = None + """Optional alias of the matched column.""" + + condition: Optional[str] = None + """The condition expression used to match a table column.""" + + def as_dict(self) -> dict: + """Serializes the MatchColumn into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.condition is not None: + body["condition"] = self.condition + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MatchColumn into a shallow dictionary of its immediate attributes.""" + body = {} + if self.alias is not None: + body["alias"] = self.alias + if self.condition is not None: + body["condition"] = self.condition + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MatchColumn: + """Deserializes the MatchColumn from a dictionary.""" + return cls(alias=d.get("alias", None), condition=d.get("condition", None)) + + class MatchType(Enum): """The artifact pattern matching type""" @@ -6954,6 +7189,13 @@ class OptionSpecOptionType(Enum): OPTION_STRING = "OPTION_STRING" +class PathOperation(Enum): + + PATH_CREATE_TABLE = "PATH_CREATE_TABLE" + PATH_READ = "PATH_READ" + PATH_READ_WRITE = "PATH_READ_WRITE" + + @dataclass class PermissionsChange: add: Optional[List[Privilege]] = None @@ -7060,6 +7302,178 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: ) +@dataclass +class PolicyInfo: + to_principals: List[str] + """List of user or group names that the policy applies to. Required on create and optional on + update.""" + + for_securable_type: SecurableType + """Type of securables that the policy should take effect on. Only `table` is supported at this + moment. Required on create and optional on update.""" + + policy_type: PolicyType + """Type of the policy. Required on create and ignored on update.""" + + column_mask: Optional[ColumnMaskOptions] = None + """Options for column mask policies. Valid only if `policy_type` is `POLICY_TYPE_COLUMN_MASK`. + Required on create and optional on update. 
When specified on update, the new options will + replace the existing options as a whole.""" + + comment: Optional[str] = None + """Optional description of the policy.""" + + created_at: Optional[int] = None + """Time at which the policy was created, in epoch milliseconds. Output only.""" + + created_by: Optional[str] = None + """Username of the user who created the policy. Output only.""" + + except_principals: Optional[List[str]] = None + """Optional list of user or group names that should be excluded from the policy.""" + + id: Optional[str] = None + """Unique identifier of the policy. This field is output only and is generated by the system.""" + + match_columns: Optional[List[MatchColumn]] = None + """Optional list of condition expressions used to match table columns. Only valid when + `for_securable_type` is `table`. When specified, the policy only applies to tables whose columns + satisfy all match conditions.""" + + name: Optional[str] = None + """Name of the policy. Required on create and ignored on update. To update the name, use the + `new_name` field.""" + + on_securable_fullname: Optional[str] = None + """Full name of the securable on which the policy is defined. Required on create and ignored on + update.""" + + on_securable_type: Optional[SecurableType] = None + """Type of the securable on which the policy is defined. Only `catalog`, `schema` and `table` are + supported at this moment. Required on create and ignored on update.""" + + row_filter: Optional[RowFilterOptions] = None + """Options for row filter policies. Valid only if `policy_type` is `POLICY_TYPE_ROW_FILTER`. + Required on create and optional on update. When specified on update, the new options will + replace the existing options as a whole.""" + + updated_at: Optional[int] = None + """Time at which the policy was last modified, in epoch milliseconds. Output only.""" + + updated_by: Optional[str] = None + """Username of the user who last modified the policy. 
Output only.""" + + when_condition: Optional[str] = None + """Optional condition when the policy should take effect.""" + + def as_dict(self) -> dict: + """Serializes the PolicyInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.column_mask: + body["column_mask"] = self.column_mask.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.except_principals: + body["except_principals"] = [v for v in self.except_principals] + if self.for_securable_type is not None: + body["for_securable_type"] = self.for_securable_type.value + if self.id is not None: + body["id"] = self.id + if self.match_columns: + body["match_columns"] = [v.as_dict() for v in self.match_columns] + if self.name is not None: + body["name"] = self.name + if self.on_securable_fullname is not None: + body["on_securable_fullname"] = self.on_securable_fullname + if self.on_securable_type is not None: + body["on_securable_type"] = self.on_securable_type.value + if self.policy_type is not None: + body["policy_type"] = self.policy_type.value + if self.row_filter: + body["row_filter"] = self.row_filter.as_dict() + if self.to_principals: + body["to_principals"] = [v for v in self.to_principals] + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.when_condition is not None: + body["when_condition"] = self.when_condition + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PolicyInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.column_mask: + body["column_mask"] = self.column_mask + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.except_principals: + body["except_principals"] = self.except_principals + if self.for_securable_type is not None: + body["for_securable_type"] = self.for_securable_type + if self.id is not None: + body["id"] = self.id + if self.match_columns: + body["match_columns"] = self.match_columns + if self.name is not None: + body["name"] = self.name + if self.on_securable_fullname is not None: + body["on_securable_fullname"] = self.on_securable_fullname + if self.on_securable_type is not None: + body["on_securable_type"] = self.on_securable_type + if self.policy_type is not None: + body["policy_type"] = self.policy_type + if self.row_filter: + body["row_filter"] = self.row_filter + if self.to_principals: + body["to_principals"] = self.to_principals + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.when_condition is not None: + body["when_condition"] = self.when_condition + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PolicyInfo: + """Deserializes the PolicyInfo from a dictionary.""" + return cls( + column_mask=_from_dict(d, "column_mask", ColumnMaskOptions), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + except_principals=d.get("except_principals", None), + for_securable_type=_enum(d, "for_securable_type", SecurableType), + id=d.get("id", None), + match_columns=_repeated_dict(d, 
"match_columns", MatchColumn), + name=d.get("name", None), + on_securable_fullname=d.get("on_securable_fullname", None), + on_securable_type=_enum(d, "on_securable_type", SecurableType), + policy_type=_enum(d, "policy_type", PolicyType), + row_filter=_from_dict(d, "row_filter", RowFilterOptions), + to_principals=d.get("to_principals", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + when_condition=d.get("when_condition", None), + ) + + +class PolicyType(Enum): + + POLICY_TYPE_COLUMN_MASK = "POLICY_TYPE_COLUMN_MASK" + POLICY_TYPE_ROW_FILTER = "POLICY_TYPE_ROW_FILTER" + + @dataclass class PrimaryKeyConstraint: name: str @@ -7573,6 +7987,42 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: ) +@dataclass +class RowFilterOptions: + function_name: str + """The fully qualified name of the row filter function. The function is called on each row of the + target table. It should return a boolean value indicating whether the row should be visible to + the user. Required on create and update.""" + + using: Optional[List[FunctionArgument]] = None + """Optional list of column aliases or constant literals to be passed as arguments to the row filter + function. The type of each column should match the positional argument of the row filter + function.""" + + def as_dict(self) -> dict: + """Serializes the RowFilterOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.using: + body["using"] = [v.as_dict() for v in self.using] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RowFilterOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.using: + body["using"] = self.using + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RowFilterOptions: + """Deserializes the RowFilterOptions from a dictionary.""" + return cls(function_name=d.get("function_name", None), using=_repeated_dict(d, "using", FunctionArgument)) + + @dataclass class SchemaInfo: """Next ID: 40""" @@ -8113,7 +8563,7 @@ class SystemSchemaInfo: state: str """The current state of enablement for the system schema. An empty string means the system schema is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | - ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" + ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE | MANAGED""" def as_dict(self) -> dict: """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" @@ -12175,6 +12625,185 @@ def get(self, name: str) -> OnlineTable: return OnlineTable.from_dict(res) +class PoliciesAPI: + """Attribute-Based Access Control (ABAC) provides high leverage governance for enforcing compliance policies + in Unity Catalog. With ABAC policies, access is controlled in a hierarchical and scalable manner, based on + data attributes rather than specific resources, enabling more flexible and comprehensive access control. + ABAC policies in Unity Catalog support conditions on securable properties, governance tags, and + environment contexts. Callers must have the `MANAGE` privilege on a securable to view, create, update, or + delete ABAC policies.""" + + def __init__(self, api_client): + self._api = api_client + + def create_policy(self, policy_info: PolicyInfo) -> PolicyInfo: + """Creates a new policy on a securable. 
The new policy applies to the securable and all its descendants. + + :param policy_info: :class:`PolicyInfo` + Required. The policy to create. + + :returns: :class:`PolicyInfo` + """ + body = policy_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/policies", body=body, headers=headers) + return PolicyInfo.from_dict(res) + + def delete_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> DeletePolicyResponse: + """Delete an ABAC policy defined on a securable. + + :param on_securable_type: str + Required. The type of the securable to delete the policy from. + :param on_securable_fullname: str + Required. The fully qualified name of the securable to delete the policy from. + :param name: str + Required. The name of the policy to delete + + :returns: :class:`DeletePolicyResponse` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + headers=headers, + ) + return DeletePolicyResponse.from_dict(res) + + def get_policy(self, on_securable_type: str, on_securable_fullname: str, name: str) -> PolicyInfo: + """Get the policy definition on a securable + + :param on_securable_type: str + Required. The type of the securable to retrieve the policy for. + :param on_securable_fullname: str + Required. The fully qualified name of securable to retrieve policy for. + :param name: str + Required. The name of the policy to retrieve. + + :returns: :class:`PolicyInfo` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + headers=headers, + ) + return PolicyInfo.from_dict(res) + + def list_policies( + self, + on_securable_type: str, + on_securable_fullname: str, + *, + include_inherited: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[PolicyInfo]: + """List all policies defined on a securable. Optionally, the list can include inherited policies defined + on the securable's parent schema or catalog. + + :param on_securable_type: str + Required. The type of the securable to list policies for. + :param on_securable_fullname: str + Required. The fully qualified name of securable to list policies for. + :param include_inherited: bool (optional) + Optional. Whether to include policies defined on parent securables. By default, the inherited + policies are not included. + :param max_results: int (optional) + Optional. Maximum number of policies to return on a single page (page length). - When not set or set + to 0, the page length is set to a server configured value (recommended); - When set to a value + greater than 0, the page length is the minimum of this value and a server configured value; + :param page_token: str (optional) + Optional. Opaque pagination token to go to next page based on previous query. 
+ + :returns: Iterator over :class:`PolicyInfo` + """ + + query = {} + if include_inherited is not None: + query["include_inherited"] = include_inherited + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}", + query=query, + headers=headers, + ) + if "policies" in json: + for v in json["policies"]: + yield PolicyInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_policy( + self, + on_securable_type: str, + on_securable_fullname: str, + name: str, + policy_info: PolicyInfo, + *, + update_mask: Optional[str] = None, + ) -> PolicyInfo: + """Update an ABAC policy on a securable. + + :param on_securable_type: str + Required. The type of the securable to update the policy for. + :param on_securable_fullname: str + Required. The fully qualified name of the securable to update the policy for. + :param name: str + Required. The name of the policy to update. + :param policy_info: :class:`PolicyInfo` + Optional fields to update. This is the request body for updating a policy. Use `update_mask` field + to specify which fields in the request is to be updated. - If `update_mask` is empty or "*", all + specified fields will be updated. - If `update_mask` is specified, only the fields specified in the + `update_mask` will be updated. If a field is specified in `update_mask` and not set in the request, + the field will be cleared. Users can use the update mask to explicitly unset optional fields such as + `exception_principals` and `when_condition`. + :param update_mask: str (optional) + Optional. The update mask field for specifying user intentions on which fields to update in the + request. + + :returns: :class:`PolicyInfo` + """ + body = policy_info.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.1/unity-catalog/policies/{on_securable_type}/{on_securable_fullname}/{name}", + query=query, + body=body, + headers=headers, + ) + return PolicyInfo.from_dict(res) + + class QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. Most write operations @@ -12960,7 +13589,7 @@ def create( properties: Optional[Dict[str, str]] = None, storage_root: Optional[str] = None, ) -> SchemaInfo: - """Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the + """Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. :param name: str @@ -13642,6 +14271,79 @@ class TablesAPI: def __init__(self, api_client): self._api = api_client + def create( + self, + name: str, + catalog_name: str, + schema_name: str, + table_type: TableType, + data_source_format: DataSourceFormat, + storage_location: str, + *, + columns: Optional[List[ColumnInfo]] = None, + properties: Optional[Dict[str, str]] = None, + ) -> TableInfo: + """Creates a new table in the specified catalog and schema. 
+ + To create an external delta table, the caller must have the **EXTERNAL_USE_SCHEMA** privilege on the + parent schema and the **EXTERNAL_USE_LOCATION** privilege on the external location. These privileges + must always be granted explicitly, and cannot be inherited through ownership or **ALL_PRIVILEGES**. + + Standard UC permissions needed to create tables still apply: **USE_CATALOG** on the parent catalog (or + ownership of the parent catalog), **CREATE_TABLE** and **USE_SCHEMA** on the parent schema (or + ownership of the parent schema), and **CREATE_EXTERNAL_TABLE** on external location. + + The **columns** field needs to be in a Spark compatible format, so we recommend you use Spark to + create these tables. The API itself does not validate the correctness of the column spec. If the spec + is not Spark compatible, the tables may not be readable by Databricks Runtime. + + NOTE: The Create Table API for external clients only supports creating **external delta tables**. The + values shown in the respective enums are all values supported by Databricks, however for this specific + Create Table API, only **table_type** **EXTERNAL** and **data_source_format** **DELTA** are supported. + Additionally, column masks are not supported when creating tables through this API. + + :param name: str + Name of table, relative to parent schema. + :param catalog_name: str + Name of parent catalog. + :param schema_name: str + Name of parent schema relative to its parent catalog. + :param table_type: :class:`TableType` + :param data_source_format: :class:`DataSourceFormat` + :param storage_location: str + Storage root URL for table (for **MANAGED**, **EXTERNAL** tables). + :param columns: List[:class:`ColumnInfo`] (optional) + The array of __ColumnInfo__ definitions of the table's columns. + :param properties: Dict[str,str] (optional) + A map of key-value properties attached to the securable. + + :returns: :class:`TableInfo` + """ + body = {} + if catalog_name is not None: + body["catalog_name"] = catalog_name + if columns is not None: + body["columns"] = [v.as_dict() for v in columns] + if data_source_format is not None: + body["data_source_format"] = data_source_format.value + if name is not None: + body["name"] = name + if properties is not None: + body["properties"] = properties + if schema_name is not None: + body["schema_name"] = schema_name + if storage_location is not None: + body["storage_location"] = storage_location + if table_type is not None: + body["table_type"] = table_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/tables", body=body, headers=headers) + return TableInfo.from_dict(res) + def delete(self, full_name: str): """Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the @@ -13663,10 +14365,10 @@ def delete(self, full_name: str): def exists(self, full_name: str) -> TableExistsResponse: """Gets if a table exists in the metastore for a specific catalog and schema. 
The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the - owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the + owner of the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, - and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on - the parent catalog * Have BROWSE privilege on the parent schema. + and either be the table owner or have the **SELECT** privilege on the table. * Have **BROWSE** + privilege on the parent catalog * Have **BROWSE** privilege on the parent schema :param full_name: str Full name of the table. @@ -13691,9 +14393,9 @@ def get( ) -> TableInfo: """Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of - the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** - privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be - the table owner or have the SELECT privilege on the table. + the parent schema and have the **USE_CATALOG** privilege on the parent catalog * Have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, + and either be the table owner or have the **SELECT** privilege on the table. :param full_name: str Full name of the table. @@ -13891,19 +14593,86 @@ def update(self, full_name: str, *, owner: Optional[str] = None): self._api.do("PATCH", f"/api/2.1/unity-catalog/tables/{full_name}", body=body, headers=headers) +class TemporaryPathCredentialsAPI: + """Temporary Path Credentials refer to short-lived, downscoped credentials used to access external cloud + storage locations registered in Databricks. These credentials are employed to provide secure and + time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud + provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service + (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud + supports temporary credentials through OAuth 2.0. + + Temporary path credentials ensure that data access is limited in scope and duration, reducing the risk of + unauthorized access or misuse. To use the temporary path credentials API, a metastore admin needs to + enable the external_access_enabled flag (off by default) at the metastore level. A user needs to be + granted the EXTERNAL USE LOCATION permission by external location owner. For requests on existing external + tables, user also needs to be granted the EXTERNAL USE SCHEMA permission at the schema level by catalog + admin. + + Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog admin + explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for security reasons. + Similarly, EXTERNAL USE LOCATION is an external location level permission that can only be granted by + external location owner explicitly and is not included in external location ownership or ALL PRIVILEGES on + the external location for security reasons. 
+ + This API only supports temporary path credentials for external locations and external tables, and volumes + will be supported in the future.""" + + def __init__(self, api_client): + self._api = api_client + + def generate_temporary_path_credentials( + self, url: str, operation: PathOperation, *, dry_run: Optional[bool] = None + ) -> GenerateTemporaryPathCredentialResponse: + """Get a short-lived credential for directly accessing cloud storage locations registered in Databricks. + The Generate Temporary Path Credentials API is only supported for external storage paths, specifically + external locations and external tables. Managed tables are not supported by this API. The metastore + must have **external_access_enabled** flag set to true (default false). The caller must have the + **EXTERNAL_USE_LOCATION** privilege on the external location; this privilege can only be granted by + external location owners. For requests on existing external tables, the caller must also have the + **EXTERNAL_USE_SCHEMA** privilege on the parent schema; this privilege can only be granted by catalog + owners. + + :param url: str + URL for path-based access. + :param operation: :class:`PathOperation` + The operation being performed on the path. + :param dry_run: bool (optional) + Optional. When set to true, the service will not validate that the generated credentials can perform + write operations, therefore no new paths will be created and the response will not contain valid + credentials. Defaults to false. + + :returns: :class:`GenerateTemporaryPathCredentialResponse` + """ + body = {} + if dry_run is not None: + body["dry_run"] = dry_run + if operation is not None: + body["operation"] = operation.value + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/unity-catalog/temporary-path-credentials", body=body, headers=headers) + return GenerateTemporaryPathCredentialResponse.from_dict(res) + + class TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage - locationswhere table data is stored in Databricks. These credentials are employed to provide secure and - time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider - has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS), - Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports - temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in - scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table - credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at - the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level - by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by - catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for - security reason.""" + locations where table data is stored in Databricks. These credentials are employed to provide secure and + time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. 
Each cloud + provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service + (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud + supports temporary credentials through OAuth 2.0. + + Temporary table credentials ensure that data access is limited in scope and duration, reducing the risk of + unauthorized access or misuse. To use the temporary table credentials API, a metastore admin needs to + enable the external_access_enabled flag (off by default) at the metastore level, and user needs to be + granted the EXTERNAL USE SCHEMA permission at the schema level by catalog admin. Note that EXTERNAL USE + SCHEMA is a schema level permission that can only be granted by catalog admin explicitly and is not + included in schema ownership or ALL PRIVILEGES on the schema for security reasons.""" def __init__(self, api_client): self._api = api_client @@ -13912,9 +14681,9 @@ def generate_temporary_table_credentials( self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None ) -> GenerateTemporaryTableCredentialResponse: """Get a short-lived credential for directly accessing the table data on cloud storage. The metastore - must have external_access_enabled flag set to true (default false). The caller must have - EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog - owners. + must have **external_access_enabled** flag set to true (default false). The caller must have the + **EXTERNAL_USE_SCHEMA** privilege on the parent schema and this privilege can only be granted by + catalog owners. :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 06ca9c2fe..678f5bf9a 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -142,7 +142,8 @@ class CleanRoomAsset: For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name.""" + For notebooks, the name is the notebook file name. For jar analyses, the name is the jar + analysis name.""" asset_type: CleanRoomAssetAssetType """The type of the asset.""" @@ -351,7 +352,7 @@ class CleanRoomAssetNotebook: """Server generated etag that represents the notebook version.""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """top-level status derived from all reviews""" + """Top-level status derived from all reviews""" reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing approvals or rejections""" @@ -546,8 +547,12 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetVolumeLocalDetails: @dataclass class CleanRoomAutoApprovalRule: author_collaborator_alias: Optional[str] = None + """Collaborator alias of the author covered by the rule. Only one of `author_collaborator_alias` + and `author_scope` can be set.""" author_scope: Optional[CleanRoomAutoApprovalRuleAuthorScope] = None + """Scope of authors covered by the rule. 
Only one of `author_collaborator_alias` and `author_scope` + can be set.""" clean_room_name: Optional[str] = None """The name of the clean room this auto-approval rule belongs to.""" @@ -562,6 +567,7 @@ class CleanRoomAutoApprovalRule: """The owner of the rule to whom the rule applies.""" runner_collaborator_alias: Optional[str] = None + """Collaborator alias of the runner covered by the rule.""" def as_dict(self) -> dict: """Serializes the CleanRoomAutoApprovalRule into a dictionary suitable for use as a JSON request body.""" @@ -704,19 +710,19 @@ def from_dict(cls, d: Dict[str, Any]) -> CleanRoomCollaborator: @dataclass class CleanRoomNotebookReview: comment: Optional[str] = None - """review comment""" + """Review comment""" created_at_millis: Optional[int] = None - """timestamp of when the review was submitted""" + """When the review was submitted, in epoch milliseconds""" review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """review outcome""" + """Review outcome""" review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None - """specified when the review was not explicitly made by a user""" + """Specified when the review was not explicitly made by a user""" reviewer_collaborator_alias: Optional[str] = None - """collaborator alias of the reviewer""" + """Collaborator alias of the reviewer""" def as_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body.""" @@ -1100,7 +1106,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: @dataclass class CreateCleanRoomAssetReviewResponse: notebook_review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None - """top-level status derived from all reviews""" + """Top-level status derived from all reviews""" notebook_reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing notebook approvals or rejections""" @@ -1348,13 +1354,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: @dataclass class NotebookVersionReview: etag: str - """etag that identifies the notebook version""" + """Etag identifying the notebook version""" review_state: CleanRoomNotebookReviewNotebookReviewState - """review outcome""" + """Review outcome""" comment: Optional[str] = None - """review comment""" + """Review comment""" def as_dict(self) -> dict: """Serializes the NotebookVersionReview into a dictionary suitable for use as a JSON request body.""" @@ -1505,12 +1511,12 @@ def create_clean_room_asset_review( name: str, notebook_review: NotebookVersionReview, ) -> CreateCleanRoomAssetReviewResponse: - """submit an asset review + """Submit an asset review :param clean_room_name: str Name of the clean room :param asset_type: :class:`CleanRoomAssetAssetType` - can only be NOTEBOOK_FILE for now + Asset type. Can only be NOTEBOOK_FILE. :param name: str Name of the asset :param notebook_review: :class:`NotebookVersionReview` @@ -1620,7 +1626,8 @@ def update( For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - For notebooks, the name is the notebook file name. + For notebooks, the name is the notebook file name. For jar analyses, the name is the jar analysis + name. :param asset: :class:`CleanRoomAsset` The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to update. 
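The clean room review workflow documented above lends itself to a short sketch; a hypothetical example of submitting a notebook review, assuming the service is exposed as `w.clean_room_assets` and that the review-state enum has an `APPROVED` member (the clean room name, asset name, and etag are placeholders):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import cleanrooms

w = WorkspaceClient()

review = cleanrooms.NotebookVersionReview(
    etag="<notebook-version-etag>",  # placeholder etag identifying the notebook version
    review_state=cleanrooms.CleanRoomNotebookReviewNotebookReviewState.APPROVED,
    comment="Reviewed and approved",
)

resp = w.clean_room_assets.create_clean_room_asset_review(
    clean_room_name="demo_clean_room",      # placeholder clean room
    asset_type=cleanrooms.CleanRoomAssetAssetType.NOTEBOOK_FILE,
    name="analysis_notebook",               # placeholder notebook asset name
    notebook_review=review,
)
print(resp.notebook_review_state)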
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 071ac89e0..c7ca04416 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -3410,6 +3410,15 @@ class GcpAttributes: boot_disk_size: Optional[int] = None """Boot disk size in GB""" + first_on_demand: Optional[int] = None + """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This + value should be greater than 0, to make sure the cluster driver node is placed on an on-demand + instance. If this value is greater than or equal to the current cluster size, all nodes will be + placed on on-demand instances. If this value is less than the current cluster size, + `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed + on `availability` instances. Note that this value does not affect cluster size and cannot + currently be mutated over the lifetime of a cluster.""" + google_service_account: Optional[str] = None """If provided, the cluster will impersonate the google service account when accessing gcloud services (like GCS). The google service account must have previously been added to the @@ -3441,6 +3450,8 @@ def as_dict(self) -> dict: body["availability"] = self.availability.value if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3458,6 +3469,8 @@ def as_shallow_dict(self) -> dict: body["availability"] = self.availability if self.boot_disk_size is not None: body["boot_disk_size"] = self.boot_disk_size + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand if self.google_service_account is not None: body["google_service_account"] = self.google_service_account if self.local_ssd_count is not None: @@ -3474,6 +3487,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: return cls( availability=_enum(d, "availability", GcpAvailability), boot_disk_size=d.get("boot_disk_size", None), + first_on_demand=d.get("first_on_demand", None), google_service_account=d.get("google_service_account", None), local_ssd_count=d.get("local_ssd_count", None), use_preemptible_executors=d.get("use_preemptible_executors", None), diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index 20c2f37f5..810fc18d9 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -589,6 +589,40 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: ) +@dataclass +class ListDatabaseCatalogsResponse: + database_catalogs: Optional[List[DatabaseCatalog]] = None + + next_page_token: Optional[str] = None + """Pagination token to request the next page of database catalogs.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseCatalogsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_catalogs: + body["database_catalogs"] = [v.as_dict() for v in self.database_catalogs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseCatalogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_catalogs: + body["database_catalogs"] = self.database_catalogs + if 
self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseCatalogsResponse: + """Deserializes the ListDatabaseCatalogsResponse from a dictionary.""" + return cls( + database_catalogs=_repeated_dict(d, "database_catalogs", DatabaseCatalog), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListDatabaseInstanceRolesResponse: database_instance_roles: Optional[List[DatabaseInstanceRole]] = None @@ -659,6 +693,40 @@ def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: ) +@dataclass +class ListSyncedDatabaseTablesResponse: + next_page_token: Optional[str] = None + """Pagination token to request the next page of synced tables.""" + + synced_tables: Optional[List[SyncedDatabaseTable]] = None + + def as_dict(self) -> dict: + """Serializes the ListSyncedDatabaseTablesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.synced_tables: + body["synced_tables"] = [v.as_dict() for v in self.synced_tables] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListSyncedDatabaseTablesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.synced_tables: + body["synced_tables"] = self.synced_tables + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListSyncedDatabaseTablesResponse: + """Deserializes the ListSyncedDatabaseTablesResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + synced_tables=_repeated_dict(d, "synced_tables", SyncedDatabaseTable), + ) + + @dataclass class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other @@ -1733,10 +1801,47 @@ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers) return SyncedDatabaseTable.from_dict(res) + def list_database_catalogs( + self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseCatalog]: + """This API is currently unimplemented, but exposed for Terraform support. + + :param instance_name: str + Name of the instance to get database catalogs for. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of synced database tables. Requests first page if absent. 
+ + :returns: Iterator over :class:`DatabaseCatalog` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/instances/{instance_name}/catalogs", query=query, headers=headers + ) + if "database_catalogs" in json: + for v in json["database_catalogs"]: + yield DatabaseCatalog.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + def list_database_instance_roles( self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DatabaseInstanceRole]: - """START OF PG ROLE APIs Section + """START OF PG ROLE APIs Section These APIs are marked a PUBLIC with stage < PUBLIC_PREVIEW. With more + recent Lakebase V2 plans, we don't plan to ever advance these to PUBLIC_PREVIEW. These APIs will + remain effectively undocumented/UI-only and we'll aim for a new public roles API as part of V2 PuPr. :param instance_name: str :param page_size: int (optional) @@ -1798,6 +1903,67 @@ def list_database_instances( return query["page_token"] = json["next_page_token"] + def list_synced_database_tables( + self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SyncedDatabaseTable]: + """This API is currently unimplemented, but exposed for Terraform support. + + :param instance_name: str + Name of the instance to get synced tables for. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of synced database tables. Requests first page if absent. + + :returns: Iterator over :class:`SyncedDatabaseTable` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/instances/{instance_name}/synced_tables", query=query, headers=headers + ) + if "synced_tables" in json: + for v in json["synced_tables"]: + yield SyncedDatabaseTable.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_database_catalog( + self, name: str, database_catalog: DatabaseCatalog, update_mask: str + ) -> DatabaseCatalog: + """This API is currently unimplemented, but exposed for Terraform support. + + :param name: str + The name of the catalog in UC. + :param database_catalog: :class:`DatabaseCatalog` + Note that updating a database catalog is not yet supported. + :param update_mask: str + The list of fields to update. Setting this field is not yet supported. + + :returns: :class:`DatabaseCatalog` + """ + body = database_catalog.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/catalogs/{name}", query=query, body=body, headers=headers) + return DatabaseCatalog.from_dict(res) + def update_database_instance( self, name: str, database_instance: DatabaseInstance, update_mask: str ) -> DatabaseInstance: @@ -1807,7 +1973,8 @@ def update_database_instance( The name of the instance. 
This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` :param update_mask: str - The list of fields to update. This field is not yet supported, and is ignored by the server. + The list of fields to update. If unspecified, all fields will be updated when possible. To wipe out + custom_tags, specify custom_tags in the update_mask with an empty custom_tags map. :returns: :class:`DatabaseInstance` """ @@ -1822,3 +1989,29 @@ def update_database_instance( res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers) return DatabaseInstance.from_dict(res) + + def update_synced_database_table( + self, name: str, synced_table: SyncedDatabaseTable, update_mask: str + ) -> SyncedDatabaseTable: + """This API is currently unimplemented, but exposed for Terraform support. + + :param name: str + Full three-part (catalog, schema, table) name of the table. + :param synced_table: :class:`SyncedDatabaseTable` + Note that updating a synced database table is not yet supported. + :param update_mask: str + The list of fields to update. Setting this field is not yet supported. + + :returns: :class:`SyncedDatabaseTable` + """ + body = synced_table.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/synced_tables/{name}", query=query, body=body, headers=headers) + return SyncedDatabaseTable.from_dict(res) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index d7c16b3c8..d8f4e3122 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -3037,6 +3037,11 @@ class JobSettings: the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" + usage_policy_id: Optional[str] = None + """The id of the user specified usage policy to use for this job. If not specified, a default usage + policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for + the budget policy used by this workload.""" + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" @@ -3089,6 +3094,8 @@ def as_dict(self) -> dict: body["timeout_seconds"] = self.timeout_seconds if self.trigger: body["trigger"] = self.trigger.as_dict() + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.webhook_notifications: body["webhook_notifications"] = self.webhook_notifications.as_dict() return body @@ -3142,6 +3149,8 @@ def as_shallow_dict(self) -> dict: body["timeout_seconds"] = self.timeout_seconds if self.trigger: body["trigger"] = self.trigger + if self.usage_policy_id is not None: + body["usage_policy_id"] = self.usage_policy_id if self.webhook_notifications: body["webhook_notifications"] = self.webhook_notifications return body @@ -3173,6 +3182,7 @@ def from_dict(cls, d: Dict[str, Any]) -> JobSettings: tasks=_repeated_dict(d, "tasks", Task), timeout_seconds=d.get("timeout_seconds", None), trigger=_from_dict(d, "trigger", TriggerSettings), + usage_policy_id=d.get("usage_policy_id", None), webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), ) @@ -7884,6 +7894,8 @@ class TerminationCodeCode(Enum): run failed due to a cloud provider issue. Refer to the state message for further details. 
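# Usage sketch for the clarified update_mask semantics on update_database_instance():
# fields named in the mask are updated, and per the new docstring, including
# "custom_tags" in the mask while sending an empty custom_tags value wipes existing
# tags. The instance name is a hypothetical placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()

instance = DatabaseInstance(name="my-database-instance")  # custom_tags intentionally left empty
cleared = w.database.update_database_instance(
    name="my-database-instance",
    database_instance=instance,
    update_mask="custom_tags",
)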
* `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. + * `BREAKING_CHANGE`: Run failed because of an intentional breaking change in Spark, but it will + be retried with a mitigation config. [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" @@ -8437,6 +8449,7 @@ def create( tasks: Optional[List[Task]] = None, timeout_seconds: Optional[int] = None, trigger: Optional[TriggerSettings] = None, + usage_policy_id: Optional[str] = None, webhook_notifications: Optional[WebhookNotifications] = None, ) -> CreateResponse: """Create a new job. @@ -8531,6 +8544,10 @@ def create( A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. + :param usage_policy_id: str (optional) + The id of the user specified usage policy to use for this job. If not specified, a default usage + policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the + budget policy used by this workload. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. @@ -8585,6 +8602,8 @@ def create( body["timeout_seconds"] = timeout_seconds if trigger is not None: body["trigger"] = trigger.as_dict() + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id if webhook_notifications is not None: body["webhook_notifications"] = webhook_notifications.as_dict() headers = { @@ -9381,6 +9400,7 @@ def submit( run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, + usage_policy_id: Optional[str] = None, webhook_notifications: Optional[WebhookNotifications] = None, ) -> Wait[Run]: """Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. @@ -9432,6 +9452,9 @@ def submit( :param tasks: List[:class:`SubmitTask`] (optional) :param timeout_seconds: int (optional) An optional timeout applied to each run of this job. A value of `0` means no timeout. + :param usage_policy_id: str (optional) + The user specified id of the usage policy to use for this one-time run. If not specified, a default + usage policy may be applied when creating or modifying the job. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when the run begins or completes. 
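# Usage sketch for the new usage_policy_id argument on JobsAPI.create(). Per the
# docstring, omitting it lets the server apply a default usage policy; the policy id,
# job name, and notebook path below are hypothetical placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

created = w.jobs.create(
    name="nightly-etl",
    usage_policy_id="1234-abcd",  # hypothetical usage policy id
    tasks=[
        jobs.Task(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Shared/etl"),
        )
    ],
)
print(created.job_id)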
@@ -9466,6 +9489,8 @@ def submit( body["tasks"] = [v.as_dict() for v in tasks] if timeout_seconds is not None: body["timeout_seconds"] = timeout_seconds + if usage_policy_id is not None: + body["usage_policy_id"] = usage_policy_id if webhook_notifications is not None: body["webhook_notifications"] = webhook_notifications.as_dict() headers = { @@ -9496,6 +9521,7 @@ def submit_and_wait( run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, + usage_policy_id: Optional[str] = None, webhook_notifications: Optional[WebhookNotifications] = None, timeout=timedelta(minutes=20), ) -> Run: @@ -9513,6 +9539,7 @@ def submit_and_wait( run_name=run_name, tasks=tasks, timeout_seconds=timeout_seconds, + usage_policy_id=usage_policy_id, webhook_notifications=webhook_notifications, ).result(timeout=timeout) diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 9c8c90627..85165d73a 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -3494,10 +3494,8 @@ class PublishSpec: online_table_name: str """The full three-part (catalog, schema, table) name of the online table.""" - publish_mode: Optional[PublishSpecPublishMode] = None - """The publish mode of the pipeline that syncs the online table with the source table. Defaults to - TRIGGERED if not specified. All publish modes require the source table to have Change Data Feed - (CDF) enabled.""" + publish_mode: PublishSpecPublishMode + """The publish mode of the pipeline that syncs the online table with the source table.""" def as_dict(self) -> dict: """Serializes the PublishSpec into a dictionary suitable for use as a JSON request body.""" @@ -3534,6 +3532,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishSpec: class PublishSpecPublishMode(Enum): CONTINUOUS = "CONTINUOUS" + SNAPSHOT = "SNAPSHOT" TRIGGERED = "TRIGGERED" diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index b74651abd..dcd951f02 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -4474,6 +4474,7 @@ def query( self, name: str, *, + client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4487,11 +4488,15 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, + usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. + :param client_request_id: str (optional) + Optional user-provided request identifier that will be recorded in the inference table and the usage + tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4533,10 +4538,14 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. + :param usage_context: Dict[str,str] (optional) + Optional user-provided context that will be recorded in the usage tracking table. 
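# Usage sketch for the matching usage_policy_id argument on one-time runs via
# submit()/submit_and_wait(). Values are hypothetical placeholders; as with create(),
# leaving usage_policy_id unset lets a default usage policy apply.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

run = w.jobs.submit_and_wait(
    run_name="ad-hoc-backfill",
    usage_policy_id="1234-abcd",  # hypothetical usage policy id
    tasks=[
        jobs.SubmitTask(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Shared/backfill"),
        )
    ],
)
print(run.run_page_url)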
:returns: :class:`QueryEndpointResponse` """ body = {} + if client_request_id is not None: + body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4563,6 +4572,8 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature + if usage_context is not None: + body["usage_context"] = usage_context headers = { "Accept": "application/json", "Content-Type": "application/json", @@ -4777,6 +4788,7 @@ def query( self, name: str, *, + client_request_id: Optional[str] = None, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str, str]] = None, @@ -4790,11 +4802,15 @@ def query( stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None, + usage_context: Optional[Dict[str, str]] = None, ) -> QueryEndpointResponse: """Query a serving endpoint :param name: str The name of the serving endpoint. This field is required and is provided via the path parameter. + :param client_request_id: str (optional) + Optional user-provided request identifier that will be recorded in the inference table and the usage + tracking table. :param dataframe_records: List[Any] (optional) Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) @@ -4836,10 +4852,14 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. + :param usage_context: Dict[str,str] (optional) + Optional user-provided context that will be recorded in the usage tracking table. 
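# Usage sketch for the new client_request_id and usage_context arguments on serving
# endpoint queries; both are recorded in the inference table and usage tracking table.
# The endpoint name, request id, and context keys are hypothetical placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()

resp = w.serving_endpoints.query(
    name="my-chat-endpoint",
    messages=[ChatMessage(role=ChatMessageRole.USER, content="Summarize the release notes.")],
    max_tokens=128,
    client_request_id="req-0001",  # echoed into the inference table
    usage_context={"team": "analytics", "use_case": "ad_hoc"},  # free-form key/value tags
)
print(resp.choices[0].message.content)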
:returns: :class:`QueryEndpointResponse` """ body = {} + if client_request_id is not None: + body["client_request_id"] = client_request_id if dataframe_records is not None: body["dataframe_records"] = [v for v in dataframe_records] if dataframe_split is not None: @@ -4866,6 +4886,8 @@ def query( body["stream"] = stream if temperature is not None: body["temperature"] = temperature + if usage_context is not None: + body["usage_context"] = usage_context data_plane_info = self._data_plane_info_query( name=name, ) diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 975860d8a..385422529 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -4617,6 +4617,18 @@ def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: @dataclass class SlackConfig: + channel_id: Optional[str] = None + """[Input-Only] Slack channel ID for notifications.""" + + channel_id_set: Optional[bool] = None + """[Output-Only] Whether channel ID is set.""" + + oauth_token: Optional[str] = None + """[Input-Only] OAuth token for Slack authentication.""" + + oauth_token_set: Optional[bool] = None + """[Output-Only] Whether OAuth token is set.""" + url: Optional[str] = None """[Input-Only] URL for Slack destination.""" @@ -4626,6 +4638,14 @@ class SlackConfig: def as_dict(self) -> dict: """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.channel_id is not None: + body["channel_id"] = self.channel_id + if self.channel_id_set is not None: + body["channel_id_set"] = self.channel_id_set + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token + if self.oauth_token_set is not None: + body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4635,6 +4655,14 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the SlackConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.channel_id is not None: + body["channel_id"] = self.channel_id + if self.channel_id_set is not None: + body["channel_id_set"] = self.channel_id_set + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token + if self.oauth_token_set is not None: + body["oauth_token_set"] = self.oauth_token_set if self.url is not None: body["url"] = self.url if self.url_set is not None: @@ -4644,7 +4672,14 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> SlackConfig: """Deserializes the SlackConfig from a dictionary.""" - return cls(url=d.get("url", None), url_set=d.get("url_set", None)) + return cls( + channel_id=d.get("channel_id", None), + channel_id_set=d.get("channel_id_set", None), + oauth_token=d.get("oauth_token", None), + oauth_token_set=d.get("oauth_token_set", None), + url=d.get("url", None), + url_set=d.get("url_set", None), + ) @dataclass
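# Usage sketch for the new Slack fields on SlackConfig: channel_id and oauth_token are
# input-only, while channel_id_set / oauth_token_set are output-only flags indicating
# whether a value is stored. The destination name, channel id, and token are
# hypothetical placeholders, and the returned config shape is assumed to mask secrets.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import Config, SlackConfig

w = WorkspaceClient()

dest = w.notification_destinations.create(
    display_name="jobs-alerts",
    config=Config(
        slack=SlackConfig(
            channel_id="C0123456789",              # input-only
            oauth_token="xoxb-placeholder-token",  # input-only; responses expose only oauth_token_set
        )
    ),
)
print(dest.config.slack.oauth_token_set)  # expected True once a token is stored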