From 49163db29b9a43edd87d7d54c7b2bb58d3e5dbad Mon Sep 17 00:00:00 2001 From: Omer Lachish Date: Tue, 8 Jul 2025 10:24:51 +0200 Subject: [PATCH] Update SDK to latest OpenAPI spec --- .codegen/_openapi_sha | 2 +- NEXT_CHANGELOG.md | 74 + databricks/sdk/__init__.py | 27 +- databricks/sdk/service/aibuilder.py | 36 - databricks/sdk/service/apps.py | 4 +- databricks/sdk/service/billing.py | 23 +- databricks/sdk/service/catalog.py | 1839 +++++++++++++++-- databricks/sdk/service/cleanrooms.py | 25 +- databricks/sdk/service/compute.py | 323 +-- databricks/sdk/service/dashboards.py | 291 ++- databricks/sdk/service/database.py | 709 ++++++- databricks/sdk/service/iam.py | 3 - databricks/sdk/service/jobs.py | 128 -- databricks/sdk/service/marketplace.py | 2 - databricks/sdk/service/ml.py | 975 ++++++--- databricks/sdk/service/oauth2.py | 1 - databricks/sdk/service/pipelines.py | 41 +- databricks/sdk/service/provisioning.py | 125 -- databricks/sdk/service/qualitymonitorv2.py | 18 - databricks/sdk/service/serving.py | 51 +- databricks/sdk/service/settings.py | 131 +- databricks/sdk/service/sharing.py | 12 +- databricks/sdk/service/sql.py | 158 +- databricks/sdk/service/vectorsearch.py | 19 - databricks/sdk/service/workspace.py | 6 - docs/account/billing/budget_policy.rst | 3 +- docs/account/billing/log_delivery.rst | 1 - docs/account/provisioning/networks.rst | 6 - docs/account/provisioning/private_access.rst | 10 - docs/account/provisioning/storage.rst | 1 - docs/account/provisioning/vpc_endpoints.rst | 1 - docs/account/provisioning/workspaces.rst | 23 - docs/account/settings/ip_access_lists.rst | 12 - .../account/settings/network_connectivity.rst | 5 - docs/account/settings/network_policies.rst | 2 + .../workspace_network_configuration.rst | 1 + docs/dbdataclasses/aibuilder.rst | 8 - docs/dbdataclasses/billing.rst | 4 - docs/dbdataclasses/catalog.rst | 429 +++- docs/dbdataclasses/cleanrooms.rst | 4 - docs/dbdataclasses/compute.rst | 3 + docs/dbdataclasses/dashboards.rst | 16 +- docs/dbdataclasses/database.rst | 66 +- docs/dbdataclasses/ml.rst | 123 +- docs/dbdataclasses/pipelines.rst | 6 + docs/dbdataclasses/qualitymonitorv2.rst | 4 - docs/dbdataclasses/serving.rst | 6 + docs/dbdataclasses/settings.rst | 8 - docs/dbdataclasses/sql.rst | 12 +- docs/workspace/apps/apps.rst | 1 + docs/workspace/catalog/credentials.rst | 5 - docs/workspace/catalog/external_lineage.rst | 67 + docs/workspace/catalog/external_locations.rst | 10 +- docs/workspace/catalog/external_metadata.rst | 80 + docs/workspace/catalog/index.rst | 2 + docs/workspace/catalog/online_tables.rst | 2 +- docs/workspace/catalog/table_constraints.rst | 2 - docs/workspace/catalog/tables.rst | 15 +- docs/workspace/catalog/volumes.rst | 5 - .../cleanrooms/clean_room_assets.rst | 4 +- docs/workspace/cleanrooms/clean_rooms.rst | 2 +- docs/workspace/compute/clusters.rst | 76 - docs/workspace/dashboards/genie.rst | 79 +- docs/workspace/dashboards/lakeview.rst | 3 + docs/workspace/database/database.rst | 59 +- docs/workspace/jobs/jobs.rst | 6 - docs/workspace/ml/feature_store.rst | 4 +- docs/workspace/ml/index.rst | 1 + docs/workspace/ml/materialized_features.rst | 84 + docs/workspace/ml/model_registry.rst | 112 +- docs/workspace/pipelines/pipelines.rst | 11 - docs/workspace/serving/serving_endpoints.rst | 5 +- docs/workspace/settings/default_namespace.rst | 7 - docs/workspace/settings/ip_access_lists.rst | 12 - docs/workspace/sharing/providers.rst | 1 - .../sharing/recipient_federation_policies.rst | 1 + docs/workspace/sharing/recipients.rst | 1 - 
docs/workspace/sql/dashboard_widgets.rst | 6 +- docs/workspace/sql/dashboards.rst | 4 +- .../sql/query_visualizations_legacy.rst | 10 +- docs/workspace/sql/warehouses.rst | 6 - .../vectorsearch/vector_search_indexes.rst | 4 - 82 files changed, 4390 insertions(+), 2074 deletions(-) create mode 100644 docs/workspace/catalog/external_lineage.rst create mode 100644 docs/workspace/catalog/external_metadata.rst create mode 100644 docs/workspace/ml/materialized_features.rst diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 3fbd74142..79f2d92b6 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -033bcb9242b006001e2cf3956896711681de1a8c \ No newline at end of file +7aade78d7c1b9f56b56f546480acb516ee93d98d \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 9bb0298b3..b2b31e12e 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -23,3 +23,77 @@ * Added `remote_disk_throughput` and `total_initial_remote_disk_size` fields for `databricks.sdk.service.compute.UpdateClusterResource`. * Added `r` enum value for `databricks.sdk.service.compute.Language`. * Added `continuous` and `continuous_restart` enum values for `databricks.sdk.service.jobs.TriggerType`. +* Added [w.external_lineage](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/external_lineage.html) workspace-level service and [w.external_metadata](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/external_metadata.html) workspace-level service. +* Added [w.materialized_features](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/materialized_features.html) workspace-level service. +* Added `delete_conversation()`, `list_conversations()` and `trash_space()` methods for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service. +* Added `create_database_instance_role()`, `delete_database_instance_role()`, `get_database_instance_role()` and `list_database_instance_roles()` methods for [w.database](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/database/database.html) workspace-level service. +* Added `connection` and `credential` fields for `databricks.sdk.service.catalog.Dependency`. +* Added `rely` field for `databricks.sdk.service.catalog.ForeignKeyConstraint`. +* Added `rely` field for `databricks.sdk.service.catalog.PrimaryKeyConstraint`. +* Added `securable_kind_manifest` field for `databricks.sdk.service.catalog.TableInfo`. +* Added `securable_kind_manifest` field for `databricks.sdk.service.catalog.TableSummary`. +* Added `child_instance_refs`, `effective_enable_readable_secondaries`, `effective_node_count`, `effective_retention_window_in_days`, `enable_readable_secondaries`, `node_count`, `parent_instance_ref`, `read_only_dns` and `retention_window_in_days` fields for `databricks.sdk.service.database.DatabaseInstance`. +* Added `claims` field for `databricks.sdk.service.database.GenerateDatabaseCredentialRequest`. +* Added `last_sync` field for `databricks.sdk.service.database.SyncedTableStatus`. +* Added `activity` field for `databricks.sdk.service.ml.DeleteTransitionRequestResponse`. +* Added `max_results` field for `databricks.sdk.service.ml.ListWebhooksRequest`. +* Added `body` and `status_code` fields for `databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* Added `model_version_databricks` field for `databricks.sdk.service.ml.TransitionStageResponse`. 
+* Added `registered_model` field for `databricks.sdk.service.ml.UpdateModelResponse`. +* Added `model_version` field for `databricks.sdk.service.ml.UpdateModelVersionResponse`. +* Added `webhook` field for `databricks.sdk.service.ml.UpdateWebhookResponse`. +* Added `run_as` field for `databricks.sdk.service.pipelines.GetPipelineResponse`. +* Added `principal` field for `databricks.sdk.service.serving.AiGatewayRateLimit`. +* Added `description` field for `databricks.sdk.service.serving.CreateServingEndpoint`. +* Added `served_entity_name` field for `databricks.sdk.service.serving.Route`. +* Added `any_static_credential` enum value for `databricks.sdk.service.catalog.CredentialType`. +* Added `databricks_row_store_format`, `delta_uniform_hudi`, `delta_uniform_iceberg`, `hive`, `iceberg`, `mongodb_format`, `oracle_format`, `salesforce_data_cloud_format` and `teradata_format` enum values for `databricks.sdk.service.catalog.DataSourceFormat`. +* Added `metric_view` enum value for `databricks.sdk.service.catalog.TableType`. +* Added `security_agents_failed_initial_verification` enum value for `databricks.sdk.service.compute.TerminationReasonCode`. +* Added `can_create_registered_model` enum value for `databricks.sdk.service.ml.PermissionLevel`. +* Added `bigquery` enum value for `databricks.sdk.service.pipelines.IngestionSourceType`. +* Added `append_only` enum value for `databricks.sdk.service.pipelines.TableSpecificConfigScdType`. +* Added `service_principal` and `user_group` enum values for `databricks.sdk.service.serving.AiGatewayRateLimitKey`. +* [Breaking] Changed `cancel_optimize()` and `delete_custom_llm()` methods for [w.ai_builder](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/aibuilder/ai_builder.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete()` method for [a.budget_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/budget_policy.html) account-level service to return `any` dataclass. +* [Breaking] Changed `delete()` method for [w.online_tables](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/online_tables.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete()` method for [w.clean_rooms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_rooms.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete_schedule()` and `delete_subscription()` methods for [w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/lakeview.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete_database_catalog()`, `delete_database_instance()`, `delete_database_table()` and `delete_synced_database_table()` methods for [w.database](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/database/database.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete_online_store()` method for [w.feature_store](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/feature_store.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete_transition_request()`, `update_model()`, `update_model_version()` and `update_webhook()` methods for [w.model_registry](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/model_registry.html) workspace-level service return type to become non-empty. 
+* [Breaking] Changed `delete_webhook()` method for [w.model_registry](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/model_registry.html) workspace-level service with new required argument order. +* [Breaking] Changed `delete()` method for [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/oauth2/federation_policy.html) account-level service to return `any` dataclass. +* [Breaking] Changed `delete()` method for [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/oauth2/service_principal_federation_policy.html) account-level service to return `any` dataclass. +* [Breaking] Changed `delete_quality_monitor()` method for [w.quality_monitor_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/qualitymonitorv2/quality_monitor_v2.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `delete_network_connectivity_configuration()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service to return `any` dataclass. +* [Breaking] Changed `delete_network_policy_rpc()` method for [a.network_policies](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_policies.html) account-level service to return `any` dataclass. +* [Breaking] Changed `delete()` method for [w.recipient_federation_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sharing/recipient_federation_policies.html) workspace-level service to return `any` dataclass. +* [Breaking] Changed `list()` method for [w.alerts_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_legacy.html) workspace-level service. New request type is `any` dataclass. +* [Breaking] Changed `update()` method for [w.dashboard_widgets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/dashboard_widgets.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateWidgetRequest` dataclass. +* [Breaking] Changed `list()` method for [w.data_sources](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/data_sources.html) workspace-level service. New request type is `any` dataclass. +* [Breaking] Changed `create()` method for [w.query_visualizations_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/query_visualizations_legacy.html) workspace-level service with new required argument order. +* [Breaking] Changed `from_stage` and `to_stage` fields for `databricks.sdk.service.ml.Activity` to type `str` dataclass. +* [Breaking] Changed `stage` field for `databricks.sdk.service.ml.ApproveTransitionRequest` to type `str` dataclass. +* [Breaking] Changed `stage` field for `databricks.sdk.service.ml.CreateTransitionRequest` to type `str` dataclass. +* [Breaking] Changed `stage` field for `databricks.sdk.service.ml.DeleteTransitionRequestRequest` to type `str` dataclass. +* [Breaking] Changed `id` field for `databricks.sdk.service.ml.DeleteWebhookRequest` to be required. +* [Breaking] Changed `capacity` field for `databricks.sdk.service.ml.OnlineStore` to be required. +* [Breaking] Changed `online_table_name` field for `databricks.sdk.service.ml.PublishSpec` to be required. +* [Breaking] Changed `stage` field for `databricks.sdk.service.ml.RejectTransitionRequest` to type `str` dataclass.
+* [Breaking] Changed `stage` field for `databricks.sdk.service.ml.TransitionModelVersionStageDatabricks` to type `str` dataclass. +* [Breaking] Changed `to_stage` field for `databricks.sdk.service.ml.TransitionRequest` to type `str` dataclass. +* [Breaking] Changed `served_model_name` field for `databricks.sdk.service.serving.Route` to no longer be required. +* Changed pagination for [TablesAPI.list](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/tables.html#databricks.sdk.service.catalog.TablesAPI.list) method. +* Changed pagination for [TablesAPI.list_summaries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/tables.html#databricks.sdk.service.catalog.TablesAPI.list_summaries) method. +* [Breaking] Removed `generate_download_full_query_result()` and `get_download_full_query_result()` methods for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service. +* [Breaking] Removed `include_delta_metadata` field for `databricks.sdk.service.catalog.ListTablesRequest`. +* [Breaking] Removed `webhook` field for `databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* [Breaking] Removed `model_version` field for `databricks.sdk.service.ml.TransitionStageResponse`. +* [Breaking] Removed `unknown_catalog_type` enum value for `databricks.sdk.service.catalog.CatalogType`. +* [Breaking] Removed `hive_custom` and `hive_serde` enum values for `databricks.sdk.service.catalog.DataSourceFormat`. +* [Breaking] Removed `unknown_securable_type` enum value for `databricks.sdk.service.catalog.SecurableType`. +* [Breaking] Removed `archived`, `none`, `production` and `staging` enum values for `databricks.sdk.service.ml.DeleteTransitionRequestStage`. +* [Breaking] Removed `archived`, `none`, `production` and `staging` enum values for `databricks.sdk.service.ml.Stage`.
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 3f144d309..f0d9efc29 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -46,7 +46,9 @@ AccountStorageCredentialsAPI, ArtifactAllowlistsAPI, CatalogsAPI, ConnectionsAPI, CredentialsAPI, - ExternalLocationsAPI, FunctionsAPI, + ExternalLineageAPI, + ExternalLocationsAPI, + ExternalMetadataAPI, FunctionsAPI, GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, QualityMonitorsAPI, @@ -88,7 +90,8 @@ ProviderListingsAPI, ProviderPersonalizationRequestsAPI, ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI) from databricks.sdk.service.ml import (ExperimentsAPI, FeatureStoreAPI, - ForecastingAPI, ModelRegistryAPI) + ForecastingAPI, MaterializedFeaturesAPI, + ModelRegistryAPI) from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI, CustomAppIntegrationAPI, OAuthPublishedAppsAPI, @@ -264,7 +267,9 @@ def __init__( self._dbfs = DbfsExt(self._api_client) self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client) self._experiments = pkg_ml.ExperimentsAPI(self._api_client) + self._external_lineage = pkg_catalog.ExternalLineageAPI(self._api_client) self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client) + self._external_metadata = pkg_catalog.ExternalMetadataAPI(self._api_client) self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) self._functions = pkg_catalog.FunctionsAPI(self._api_client) @@ -280,6 +285,7 @@ def __init__( self._lakeview = pkg_dashboards.LakeviewAPI(self._api_client) self._lakeview_embedded = pkg_dashboards.LakeviewEmbeddedAPI(self._api_client) self._libraries = pkg_compute.LibrariesAPI(self._api_client) + self._materialized_features = pkg_ml.MaterializedFeaturesAPI(self._api_client) self._metastores = pkg_catalog.MetastoresAPI(self._api_client) self._model_registry = pkg_ml.ModelRegistryAPI(self._api_client) self._model_versions = pkg_catalog.ModelVersionsAPI(self._api_client) @@ -414,7 +420,7 @@ def clean_room_task_runs(self) -> pkg_cleanrooms.CleanRoomTaskRunsAPI: @property def clean_rooms(self) -> pkg_cleanrooms.CleanRoomsAPI: - """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.""" + """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other's data.""" return self._clean_rooms @property @@ -512,11 +518,21 @@ def experiments(self) -> pkg_ml.ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.""" return self._experiments + @property + def external_lineage(self) -> pkg_catalog.ExternalLineageAPI: + """External Lineage APIs enable defining and managing lineage relationships between Databricks objects and external systems.""" + return self._external_lineage + @property def external_locations(self) -> pkg_catalog.ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.""" return self._external_locations + @property + def external_metadata(self) -> pkg_catalog.ExternalMetadataAPI: + """External Metadata objects enable customers to register and 
manage metadata about external systems within Unity Catalog.""" + return self._external_metadata + @property def feature_store(self) -> pkg_ml.FeatureStoreAPI: """A feature store is a centralized repository that enables data scientists to find and share features.""" @@ -592,6 +608,11 @@ def libraries(self) -> pkg_compute.LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.""" return self._libraries + @property + def materialized_features(self) -> pkg_ml.MaterializedFeaturesAPI: + """Materialized Features are columns in tables and views that can be directly used as features to train and serve ML models.""" + return self._materialized_features + @property def metastores(self) -> pkg_catalog.MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog.""" diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py index 2d2633622..e71040b56 100755 --- a/databricks/sdk/service/aibuilder.py +++ b/databricks/sdk/service/aibuilder.py @@ -20,24 +20,6 @@ class CancelCustomLlmOptimizationRunRequest: id: Optional[str] = None -@dataclass -class CancelOptimizeResponse: - def as_dict(self) -> dict: - """Serializes the CancelOptimizeResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelOptimizeResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelOptimizeResponse: - """Deserializes the CancelOptimizeResponse from a dictionary.""" - return cls() - - @dataclass class CreateCustomLlmRequest: name: str @@ -221,24 +203,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Dataset: return cls(table=_from_dict(d, "table", Table)) -@dataclass -class DeleteCustomLlmResponse: - def as_dict(self) -> dict: - """Serializes the DeleteCustomLlmResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteCustomLlmResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomLlmResponse: - """Deserializes the DeleteCustomLlmResponse from a dictionary.""" - return cls() - - @dataclass class StartCustomLlmOptimizationRunRequest: id: Optional[str] = None diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 9f9e38c4a..1a83022e6 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -222,7 +222,6 @@ class AppAccessControlRequest: """name of the group""" permission_level: Optional[AppPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -491,7 +490,6 @@ class AppPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[AppPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the AppPermission into a dictionary suitable for use as a JSON request body.""" @@ -577,7 +575,6 @@ class AppPermissionsDescription: description: Optional[str] = None permission_level: Optional[AppPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -1279,6 +1276,7 @@ def deploy(self, app_name: 
str, app_deployment: AppDeployment) -> Wait[AppDeploy :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` + The app deployment configuration. :returns: Long-running operation waiter for :class:`AppDeployment`. diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 8c61275b5..d779cc24f 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -863,24 +863,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteBudgetConfigurationResponse: return cls() -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - class DeliveryStatus(Enum): """* The status string for log delivery. Possible values are: `CREATED`: There were no log delivery attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has @@ -1626,7 +1608,6 @@ class WrappedCreateLogDeliveryConfiguration: """* Properties of the new log delivery configuration.""" log_delivery_configuration: CreateLogDeliveryConfigurationParams - """* Log Delivery Configuration""" def as_dict(self) -> dict: """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" @@ -1883,7 +1864,8 @@ def update( :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. :param policy: :class:`BudgetPolicy` - Contains the BudgetPolicy details. + The policy to update. `creator_user_id` cannot be specified in the request. All other fields must be + specified even if not changed. The `policy_id` is used to identify the policy to update. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy @@ -2102,7 +2084,6 @@ def create( [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` - * Log Delivery Configuration :returns: :class:`WrappedLogDeliveryConfiguration` """ diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 102bae13f..738d13cb6 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -598,7 +598,7 @@ class AwsSqsQueue: queue_url: Optional[str] = None """The AQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} - REQUIRED for provided_sqs.""" + Required for provided_sqs.""" def as_dict(self) -> dict: """Serializes the AwsSqsQueue into a dictionary suitable for use as a JSON request body.""" @@ -803,15 +803,15 @@ class AzureQueueStorage: queue_url: Optional[str] = None """The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} - REQUIRED for provided_aqs.""" + Required for provided_aqs.""" resource_group: Optional[str] = None """The resource group for the queue, event grid subscription, and external location storage - account. ONLY REQUIRED for locations with a service principal storage credential""" + account. 
Only required for locations with a service principal storage credential""" subscription_id: Optional[str] = None - """OPTIONAL: The subscription id for the queue, event grid subscription, and external location - storage account. REQUIRED for locations with a service principal storage credential""" + """Optional subscription id for the queue, event grid subscription, and external location storage + account. Required for locations with a service principal storage credential""" def as_dict(self) -> dict: """Serializes the AzureQueueStorage into a dictionary suitable for use as a JSON request body.""" @@ -948,7 +948,6 @@ class CatalogInfo: through the BROWSE privilege when include_browse is enabled in the request.""" catalog_type: Optional[CatalogType] = None - """The type of the catalog.""" comment: Optional[str] = None """User-provided free-form text description.""" @@ -994,10 +993,8 @@ class CatalogInfo: A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.""" provisioning_info: Optional[ProvisioningInfo] = None - """Status of an asynchronously provisioned resource.""" securable_type: Optional[SecurableType] = None - """The type of Unity Catalog securable.""" share_name: Optional[str] = None """The name of the share under the share provider.""" @@ -1163,7 +1160,6 @@ class CatalogType(Enum): MANAGED_CATALOG = "MANAGED_CATALOG" MANAGED_ONLINE_CATALOG = "MANAGED_ONLINE_CATALOG" SYSTEM_CATALOG = "SYSTEM_CATALOG" - UNKNOWN_CATALOG_TYPE = "UNKNOWN_CATALOG_TYPE" @dataclass @@ -1359,6 +1355,36 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnMask: return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None)) +@dataclass +class ColumnRelationship: + source: Optional[str] = None + + target: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ColumnRelationship into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.source is not None: + body["source"] = self.source + if self.target is not None: + body["target"] = self.target + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ColumnRelationship into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source is not None: + body["source"] = self.source + if self.target is not None: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ColumnRelationship: + """Deserializes the ColumnRelationship from a dictionary.""" + return cls(source=d.get("source", None), target=d.get("target", None)) + + class ColumnTypeName(Enum): ARRAY = "ARRAY" @@ -1387,6 +1413,33 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" +@dataclass +class ConnectionDependency: + """A connection that is dependent on a SQL object.""" + + connection_name: Optional[str] = None + """Full name of the dependent connection, in the form of __connection_name__.""" + + def as_dict(self) -> dict: + """Serializes the ConnectionDependency into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: + body["connection_name"] = self.connection_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectionDependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_name is not None: + body["connection_name"] = self.connection_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConnectionDependency: + """Deserializes the 
ConnectionDependency from a dictionary.""" + return cls(connection_name=d.get("connection_name", None)) + + @dataclass class ConnectionInfo: comment: Optional[str] = None @@ -1426,13 +1479,11 @@ class ConnectionInfo: """A map of key-value properties attached to the securable.""" provisioning_info: Optional[ProvisioningInfo] = None - """Status of an asynchronously provisioned resource.""" read_only: Optional[bool] = None """If the connection is read only.""" securable_type: Optional[SecurableType] = None - """The type of Unity Catalog securable.""" updated_at: Optional[int] = None """Time at which this connection was updated, in epoch milliseconds.""" @@ -1551,7 +1602,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 33""" + """Next Id: 36""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -1883,10 +1934,9 @@ class CreateExternalLocation: """User-provided free-form text description.""" enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" + """Whether to enable file events on this external location.""" encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is @@ -1894,7 +1944,7 @@ class CreateExternalLocation: sufficient.""" file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" + """File event queue settings.""" read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" @@ -2492,6 +2542,65 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRegisteredModelRequest: ) +@dataclass +class CreateRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the CreateRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateRequestExternalLineage: + """Deserializes the CreateRequestExternalLineage from a dictionary.""" + return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", 
None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) + + @dataclass class CreateResponse: def as_dict(self) -> dict: @@ -2670,8 +2779,6 @@ class CreateTableConstraint: """The full name of the table referenced by the constraint.""" constraint: TableConstraint - """A table constraint, as defined by *one* of the following fields being set: - __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" def as_dict(self) -> dict: """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body.""" @@ -2709,11 +2816,6 @@ class CreateVolumeRequestContent: """The name of the volume""" volume_type: VolumeType - """The type of the volume. An external volume is located in the specified external location. A - managed volume is located in the default location which is specified by the parent schema, or - the parent catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" comment: Optional[str] = None """The comment attached to the volume""" @@ -2768,6 +2870,33 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent: ) +@dataclass +class CredentialDependency: + """A credential that is dependent on a SQL object.""" + + credential_name: Optional[str] = None + """Full name of the dependent credential, in the form of __credential_name__.""" + + def as_dict(self) -> dict: + """Serializes the CredentialDependency into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_name is not None: + body["credential_name"] = self.credential_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CredentialDependency into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_name is not None: + body["credential_name"] = self.credential_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CredentialDependency: + """Deserializes the CredentialDependency from a dictionary.""" + return cls(credential_name=d.get("credential_name", None)) + + @dataclass class CredentialInfo: aws_iam_role: Optional[AwsIamRole] = None @@ -2941,8 +3070,9 @@ class CredentialPurpose(Enum): class CredentialType(Enum): - """Next Id: 12""" + """Next Id: 13""" + ANY_STATIC_CREDENTIAL = "ANY_STATIC_CREDENTIAL" BEARER_TOKEN = "BEARER_TOKEN" OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN" OAUTH_M2M = "OAUTH_M2M" @@ -2996,21 +3126,28 @@ class DataSourceFormat(Enum): BIGQUERY_FORMAT = "BIGQUERY_FORMAT" CSV = "CSV" DATABRICKS_FORMAT = "DATABRICKS_FORMAT" + DATABRICKS_ROW_STORE_FORMAT = "DATABRICKS_ROW_STORE_FORMAT" DELTA = "DELTA" DELTASHARING = "DELTASHARING" - HIVE_CUSTOM = "HIVE_CUSTOM" - HIVE_SERDE = "HIVE_SERDE" + DELTA_UNIFORM_HUDI = "DELTA_UNIFORM_HUDI" + DELTA_UNIFORM_ICEBERG = "DELTA_UNIFORM_ICEBERG" + HIVE = "HIVE" + ICEBERG = "ICEBERG" JSON = "JSON" + MONGODB_FORMAT = "MONGODB_FORMAT" MYSQL_FORMAT = "MYSQL_FORMAT" NETSUITE_FORMAT = "NETSUITE_FORMAT" + ORACLE_FORMAT = "ORACLE_FORMAT" ORC = "ORC" PARQUET = "PARQUET" POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT" REDSHIFT_FORMAT = "REDSHIFT_FORMAT" + SALESFORCE_DATA_CLOUD_FORMAT = "SALESFORCE_DATA_CLOUD_FORMAT" SALESFORCE_FORMAT = "SALESFORCE_FORMAT" SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT" SQLDW_FORMAT = "SQLDW_FORMAT" SQLSERVER_FORMAT = "SQLSERVER_FORMAT" + TERADATA_FORMAT = "TERADATA_FORMAT" TEXT = "TEXT" UNITY_CATALOG = "UNITY_CATALOG" 
VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT" @@ -3152,6 +3289,49 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: return cls() +@dataclass +class DeleteRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the DeleteRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteRequestExternalLineage: + """Deserializes the DeleteRequestExternalLineage from a dictionary.""" + return cls( + id=d.get("id", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) + + @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -3206,18 +3386,24 @@ class DeltaSharingScopeEnum(Enum): @dataclass class Dependency: - """A dependency of a SQL object. Either the __table__ field or the __function__ field must be - defined.""" + """A dependency of a SQL object. One of the following fields must be defined: __table__, + __function__, __connection__, or __credential__.""" + + connection: Optional[ConnectionDependency] = None + + credential: Optional[CredentialDependency] = None function: Optional[FunctionDependency] = None - """A function that is dependent on a SQL object.""" table: Optional[TableDependency] = None - """A table that is dependent on a SQL object.""" def as_dict(self) -> dict: """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" body = {} + if self.connection: + body["connection"] = self.connection.as_dict() + if self.credential: + body["credential"] = self.credential.as_dict() if self.function: body["function"] = self.function.as_dict() if self.table: @@ -3227,6 +3413,10 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" body = {} + if self.connection: + body["connection"] = self.connection + if self.credential: + body["credential"] = self.credential if self.function: body["function"] = self.function if self.table: @@ -3237,7 +3427,10 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Dependency: """Deserializes the Dependency from a dictionary.""" return cls( - function=_from_dict(d, "function", FunctionDependency), table=_from_dict(d, "table", TableDependency) + connection=_from_dict(d, "connection", ConnectionDependency), + credential=_from_dict(d, "credential", CredentialDependency), + function=_from_dict(d, "function", FunctionDependency), + table=_from_dict(d, "table", TableDependency), ) @@ -3493,58 +3686,596 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: - """Deserializes the 
EnableRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: + """Deserializes the EnableRequest from a dictionary.""" + return cls( + catalog_name=d.get("catalog_name", None), + metastore_id=d.get("metastore_id", None), + schema_name=d.get("schema_name", None), + ) + + +@dataclass +class EnableResponse: + def as_dict(self) -> dict: + """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: + """Deserializes the EnableResponse from a dictionary.""" + return cls() + + +@dataclass +class EncryptionDetails: + """Encryption options that apply to clients connecting to cloud storage.""" + + sse_encryption_details: Optional[SseEncryptionDetails] = None + """Server-Side Encryption properties for clients communicating with AWS s3.""" + + def as_dict(self) -> dict: + """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: + """Deserializes the EncryptionDetails from a dictionary.""" + return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + + +@dataclass +class ExternalLineageExternalMetadata: + name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineageExternalMetadata into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageExternalMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadata: + """Deserializes the ExternalLineageExternalMetadata from a dictionary.""" + return cls(name=d.get("name", None)) + + +@dataclass +class ExternalLineageExternalMetadataInfo: + """Represents the external metadata object in the lineage event.""" + + entity_type: Optional[str] = None + """Type of entity represented by the external metadata object.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + name: Optional[str] = None + """Name of the external metadata object.""" + + system_type: Optional[SystemType] = None + """Type of external system.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageExternalMetadataInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.system_type is not None: + body["system_type"] = self.system_type.value + return body + + def as_shallow_dict(self) -> dict: + 
"""Serializes the ExternalLineageExternalMetadataInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.system_type is not None: + body["system_type"] = self.system_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadataInfo: + """Deserializes the ExternalLineageExternalMetadataInfo from a dictionary.""" + return cls( + entity_type=d.get("entity_type", None), + event_time=d.get("event_time", None), + name=d.get("name", None), + system_type=_enum(d, "system_type", SystemType), + ) + + +@dataclass +class ExternalLineageFileInfo: + """Represents the path information in the lineage event.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + path: Optional[str] = None + """URL of the path.""" + + securable_name: Optional[str] = None + """The full name of the securable on the path.""" + + securable_type: Optional[str] = None + """The securable type of the securable on the path.""" + + storage_location: Optional[str] = None + """The storage location associated with securable on the path.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageFileInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.path is not None: + body["path"] = self.path + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type + if self.storage_location is not None: + body["storage_location"] = self.storage_location + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageFileInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.path is not None: + body["path"] = self.path + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type + if self.storage_location is not None: + body["storage_location"] = self.storage_location + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageFileInfo: + """Deserializes the ExternalLineageFileInfo from a dictionary.""" + return cls( + event_time=d.get("event_time", None), + path=d.get("path", None), + securable_name=d.get("securable_name", None), + securable_type=d.get("securable_type", None), + storage_location=d.get("storage_location", None), + ) + + +@dataclass +class ExternalLineageInfo: + """Lineage response containing lineage information of a data asset.""" + + external_lineage_info: Optional[ExternalLineageRelationshipInfo] = None + """Information about the edge metadata of the external lineage relationship.""" + + external_metadata_info: Optional[ExternalLineageExternalMetadataInfo] = None + """Information about external metadata involved in the lineage relationship.""" + + file_info: Optional[ExternalLineageFileInfo] = None + """Information about the file involved in the lineage relationship.""" + + model_info: Optional[ExternalLineageModelVersionInfo] = None + """Information about the model version involved in the lineage relationship.""" + + table_info: 
Optional[ExternalLineageTableInfo] = None + """Information about the table involved in the lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_lineage_info: + body["external_lineage_info"] = self.external_lineage_info.as_dict() + if self.external_metadata_info: + body["external_metadata_info"] = self.external_metadata_info.as_dict() + if self.file_info: + body["file_info"] = self.file_info.as_dict() + if self.model_info: + body["model_info"] = self.model_info.as_dict() + if self.table_info: + body["table_info"] = self.table_info.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_lineage_info: + body["external_lineage_info"] = self.external_lineage_info + if self.external_metadata_info: + body["external_metadata_info"] = self.external_metadata_info + if self.file_info: + body["file_info"] = self.file_info + if self.model_info: + body["model_info"] = self.model_info + if self.table_info: + body["table_info"] = self.table_info + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageInfo: + """Deserializes the ExternalLineageInfo from a dictionary.""" + return cls( + external_lineage_info=_from_dict(d, "external_lineage_info", ExternalLineageRelationshipInfo), + external_metadata_info=_from_dict(d, "external_metadata_info", ExternalLineageExternalMetadataInfo), + file_info=_from_dict(d, "file_info", ExternalLineageFileInfo), + model_info=_from_dict(d, "model_info", ExternalLineageModelVersionInfo), + table_info=_from_dict(d, "table_info", ExternalLineageTableInfo), + ) + + +@dataclass +class ExternalLineageModelVersion: + name: Optional[str] = None + + version: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineageModelVersion into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageModelVersion into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersion: + """Deserializes the ExternalLineageModelVersion from a dictionary.""" + return cls(name=d.get("name", None), version=d.get("version", None)) + + +@dataclass +class ExternalLineageModelVersionInfo: + """Represents the model version information in the lineage event.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + model_name: Optional[str] = None + """Name of the model.""" + + version: Optional[int] = None + """Version number of the model.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageModelVersionInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.model_name is not None: + body["model_name"] = self.model_name + if self.version is not None: + body["version"] = self.version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageModelVersionInfo into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.model_name is not None: + body["model_name"] = self.model_name + if self.version is not None: + body["version"] = self.version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersionInfo: + """Deserializes the ExternalLineageModelVersionInfo from a dictionary.""" + return cls( + event_time=d.get("event_time", None), model_name=d.get("model_name", None), version=d.get("version", None) + ) + + +@dataclass +class ExternalLineageObject: + external_metadata: Optional[ExternalLineageExternalMetadata] = None + + model_version: Optional[ExternalLineageModelVersion] = None + + path: Optional[ExternalLineagePath] = None + + table: Optional[ExternalLineageTable] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineageObject into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_metadata: + body["external_metadata"] = self.external_metadata.as_dict() + if self.model_version: + body["model_version"] = self.model_version.as_dict() + if self.path: + body["path"] = self.path.as_dict() + if self.table: + body["table"] = self.table.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageObject into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_metadata: + body["external_metadata"] = self.external_metadata + if self.model_version: + body["model_version"] = self.model_version + if self.path: + body["path"] = self.path + if self.table: + body["table"] = self.table + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageObject: + """Deserializes the ExternalLineageObject from a dictionary.""" + return cls( + external_metadata=_from_dict(d, "external_metadata", ExternalLineageExternalMetadata), + model_version=_from_dict(d, "model_version", ExternalLineageModelVersion), + path=_from_dict(d, "path", ExternalLineagePath), + table=_from_dict(d, "table", ExternalLineageTable), + ) + + +@dataclass +class ExternalLineagePath: + url: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineagePath into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineagePath into a shallow dictionary of its immediate attributes.""" + body = {} + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineagePath: + """Deserializes the ExternalLineagePath from a dictionary.""" + return cls(url=d.get("url", None)) + + +@dataclass +class ExternalLineageRelationship: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageRelationship into a dictionary suitable for use as a JSON request body.""" + body = 
{} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageRelationship into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationship: + """Deserializes the ExternalLineageRelationship from a dictionary.""" + return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) + + +@dataclass +class ExternalLineageRelationshipInfo: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the ExternalLineageRelationshipInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageRelationshipInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationshipInfo: + """Deserializes the ExternalLineageRelationshipInfo from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), - metastore_id=d.get("metastore_id", None), - schema_name=d.get("schema_name", None), + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), ) @dataclass -class EnableResponse: +class ExternalLineageTable: + name: Optional[str] = None + def as_dict(self) -> dict: - """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageTable into a dictionary suitable for use as a JSON request body.""" body = {} + if 
self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: - """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageTable into a shallow dictionary of its immediate attributes.""" body = {} + if self.name is not None: + body["name"] = self.name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: - """Deserializes the EnableResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTable: + """Deserializes the ExternalLineageTable from a dictionary.""" + return cls(name=d.get("name", None)) @dataclass -class EncryptionDetails: - """Encryption options that apply to clients connecting to cloud storage.""" +class ExternalLineageTableInfo: + """Represents the table information in the lineage event.""" - sse_encryption_details: Optional[SseEncryptionDetails] = None - """Server-Side Encryption properties for clients communicating with AWS s3.""" + catalog_name: Optional[str] = None + """Name of Catalog.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + name: Optional[str] = None + """Name of Table.""" + + schema_name: Optional[str] = None + """Name of Schema.""" def as_dict(self) -> dict: - """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageTableInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body def as_shallow_dict(self) -> dict: - """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageTableInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: - """Deserializes the EncryptionDetails from a dictionary.""" - return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTableInfo: + """Deserializes the ExternalLineageTableInfo from a dictionary.""" + return cls( + catalog_name=d.get("catalog_name", None), + event_time=d.get("event_time", None), + name=d.get("name", None), + schema_name=d.get("schema_name", None), + ) @dataclass @@ -3569,10 +4300,9 @@ class ExternalLocationInfo: """Name of the storage credential used with this location.""" enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" + """Whether to enable file events on this external location.""" encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to 
cloud storage.""" fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is @@ -3580,7 +4310,7 @@ class ExternalLocationInfo: sufficient.""" file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" + """File event queue settings.""" isolation_mode: Optional[IsolationMode] = None @@ -3712,6 +4442,137 @@ def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: ) +@dataclass +class ExternalMetadata: + name: str + """Name of the external metadata object.""" + + system_type: SystemType + """Type of external system.""" + + entity_type: str + """Type of entity within the external system.""" + + columns: Optional[List[str]] = None + """List of columns associated with the external metadata object.""" + + create_time: Optional[str] = None + """Time at which this external metadata object was created.""" + + created_by: Optional[str] = None + """Username of external metadata object creator.""" + + description: Optional[str] = None + """User-provided free-form text description.""" + + id: Optional[str] = None + """Unique identifier of the external metadata object.""" + + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" + + owner: Optional[str] = None + """Owner of the external metadata object.""" + + properties: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the external metadata object.""" + + update_time: Optional[str] = None + """Time at which this external metadata object was last modified.""" + + updated_by: Optional[str] = None + """Username of user who last modified external metadata object.""" + + url: Optional[str] = None + """URL associated with the external metadata object.""" + + def as_dict(self) -> dict: + """Serializes the ExternalMetadata into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: + body["columns"] = [v for v in self.columns] + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.description is not None: + body["description"] = self.description + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.system_type is not None: + body["system_type"] = self.system_type.value + if self.update_time is not None: + body["update_time"] = self.update_time + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.description is not None: + body["description"] = self.description + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not 
None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.system_type is not None: + body["system_type"] = self.system_type + if self.update_time is not None: + body["update_time"] = self.update_time + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalMetadata: + """Deserializes the ExternalMetadata from a dictionary.""" + return cls( + columns=d.get("columns", None), + create_time=d.get("create_time", None), + created_by=d.get("created_by", None), + description=d.get("description", None), + entity_type=d.get("entity_type", None), + id=d.get("id", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + properties=d.get("properties", None), + system_type=_enum(d, "system_type", SystemType), + update_time=d.get("update_time", None), + updated_by=d.get("updated_by", None), + url=d.get("url", None), + ) + + @dataclass class FailedStatus: """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the @@ -3828,6 +4689,9 @@ class ForeignKeyConstraint: parent_columns: List[str] """Column names for this constraint.""" + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" + def as_dict(self) -> dict: """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} @@ -3839,6 +4703,8 @@ def as_dict(self) -> dict: body["parent_columns"] = [v for v in self.parent_columns] if self.parent_table is not None: body["parent_table"] = self.parent_table + if self.rely is not None: + body["rely"] = self.rely return body def as_shallow_dict(self) -> dict: @@ -3852,6 +4718,8 @@ def as_shallow_dict(self) -> dict: body["parent_columns"] = self.parent_columns if self.parent_table is not None: body["parent_table"] = self.parent_table + if self.rely is not None: + body["rely"] = self.rely return body @classmethod @@ -3862,6 +4730,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: name=d.get("name", None), parent_columns=d.get("parent_columns", None), parent_table=d.get("parent_table", None), + rely=d.get("rely", None), ) @@ -4205,10 +5074,8 @@ class FunctionParameterInfo: """Default value of the parameter.""" parameter_mode: Optional[FunctionParameterMode] = None - """The mode of the function parameter.""" parameter_type: Optional[FunctionParameterType] = None - """The type of function parameter.""" type_interval_type: Optional[str] = None """Format of IntervalType.""" @@ -4371,7 +5238,7 @@ class GcpPubsub: subscription_name: Optional[str] = None """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name} - REQUIRED for provided_pubsub.""" + Required for provided_pubsub.""" def as_dict(self) -> dict: """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" @@ -4463,10 +5330,8 @@ class GenerateTemporaryServiceCredentialRequest: """The name of the service credential used to generate a temporary credential""" azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None - """The Azure cloud options to customize the requested temporary credential""" gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None - """The GCP cloud options to customize the requested temporary 
credential""" def as_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" @@ -4537,29 +5402,18 @@ def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRequest @dataclass class GenerateTemporaryTableCredentialResponse: aws_temp_credentials: Optional[AwsCredentials] = None - """AWS temporary credentials for API authentication. Read more at - https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" azure_aad: Optional[AzureActiveDirectoryToken] = None - """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed - Identity. Read more at - https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None - """Azure temporary credentials for API authentication. Read more at - https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas""" expiration_time: Optional[int] = None """Server time when the credential will expire, in epoch milliseconds. The API client is advised to cache the credential given this expiration time.""" gcp_oauth_token: Optional[GcpOauthToken] = None - """GCP temporary credentials for API authentication. Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" r2_temp_credentials: Optional[R2Credentials] = None - """R2 temporary credentials for API authentication. Read more at - https://developers.cloudflare.com/r2/api/s3/tokens/.""" url: Optional[str] = None """The URL of the storage path accessible by the temporary credential.""" @@ -4921,6 +5775,12 @@ class IsolationMode(Enum): ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN" +class LineageDirection(Enum): + + DOWNSTREAM = "DOWNSTREAM" + UPSTREAM = "UPSTREAM" + + @dataclass class ListAccountMetastoreAssignmentsResponse: """The list of workspaces to which the given metastore is assigned.""" @@ -5074,6 +5934,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: ) +@dataclass +class ListExternalLineageRelationshipsResponse: + external_lineage_relationships: Optional[List[ExternalLineageInfo]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListExternalLineageRelationshipsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_lineage_relationships: + body["external_lineage_relationships"] = [v.as_dict() for v in self.external_lineage_relationships] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListExternalLineageRelationshipsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_lineage_relationships: + body["external_lineage_relationships"] = self.external_lineage_relationships + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListExternalLineageRelationshipsResponse: + """Deserializes the ListExternalLineageRelationshipsResponse from a dictionary.""" + return cls( + external_lineage_relationships=_repeated_dict(d, "external_lineage_relationships", ExternalLineageInfo), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListExternalLocationsResponse: external_locations: 
Optional[List[ExternalLocationInfo]] = None @@ -5110,6 +6003,39 @@ def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: ) +@dataclass +class ListExternalMetadataResponse: + external_metadata: Optional[List[ExternalMetadata]] = None + + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ListExternalMetadataResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_metadata: + body["external_metadata"] = [v.as_dict() for v in self.external_metadata] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListExternalMetadataResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_metadata: + body["external_metadata"] = self.external_metadata + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListExternalMetadataResponse: + """Deserializes the ListExternalMetadataResponse from a dictionary.""" + return cls( + external_metadata=_repeated_dict(d, "external_metadata", ExternalMetadata), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ListFunctionsResponse: functions: Optional[List[FunctionInfo]] = None @@ -6098,7 +7024,6 @@ class MonitorInfo: """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" status: MonitorInfoStatus - """The status of the monitor.""" monitor_version: str """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted.""" @@ -6784,26 +7709,18 @@ class OnlineTableStatus: """Status of an online table.""" continuous_update_status: Optional[ContinuousUpdateStatus] = None - """Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE - or the ONLINE_UPDATING_PIPELINE_RESOURCES state.""" detailed_state: Optional[OnlineTableState] = None """The state of the online table.""" failed_status: Optional[FailedStatus] = None - """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the - ONLINE_PIPELINE_FAILED state.""" message: Optional[str] = None """A text description of the current state of the online table.""" provisioning_status: Optional[ProvisioningStatus] = None - """Detailed status of an online table. Shown if the online table is in the - PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" triggered_update_status: Optional[TriggeredUpdateStatus] = None - """Detailed status of an online table. 
Shown if the online table is in the ONLINE_TRIGGERED_UPDATE - or the ONLINE_NO_PENDING_UPDATE state.""" def as_dict(self) -> dict: """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body.""" @@ -6840,18 +7757,181 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: - """Deserializes the OnlineTableStatus from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: + """Deserializes the OnlineTableStatus from a dictionary.""" + return cls( + continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), + detailed_state=_enum(d, "detailed_state", OnlineTableState), + failed_status=_from_dict(d, "failed_status", FailedStatus), + message=d.get("message", None), + provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), + triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), + ) + + +@dataclass +class OptionSpec: + """Spec of an allowed option on a securable kind and its attributes. This is mostly used by UI to + provide user friendly hints and descriptions in order to facilitate the securable creation + process.""" + + allowed_values: Optional[List[str]] = None + """For drop down / radio button selections, UI will want to know the possible input values, it can + also be used by other option types to limit input selections.""" + + default_value: Optional[str] = None + """The default value of the option, for example, value '443' for 'port' option.""" + + description: Optional[str] = None + """A concise user facing description of what the input value of this option should look like.""" + + hint: Optional[str] = None + """The hint is used on the UI to suggest what the input value can possibly be like, for example: + example.com for 'host' option. Unlike default value, it will not be applied automatically + without user input.""" + + is_copiable: Optional[bool] = None + """Indicates whether an option should be displayed with copy button on the UI.""" + + is_creatable: Optional[bool] = None + """Indicates whether an option can be provided by users in the create/update path of an entity.""" + + is_hidden: Optional[bool] = None + """Is the option value not user settable and is thus not shown on the UI.""" + + is_loggable: Optional[bool] = None + """Specifies whether this option is safe to log, i.e. 
no sensitive information.""" + + is_required: Optional[bool] = None + """Is the option required.""" + + is_secret: Optional[bool] = None + """Is the option value considered secret and thus redacted on the UI.""" + + is_updatable: Optional[bool] = None + """Is the option updatable by users.""" + + name: Optional[str] = None + """The unique name of the option.""" + + oauth_stage: Optional[OptionSpecOauthStage] = None + """Specifies when the option value is displayed on the UI within the OAuth flow.""" + + type: Optional[OptionSpecOptionType] = None + """The type of the option.""" + + def as_dict(self) -> dict: + """Serializes the OptionSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allowed_values: + body["allowed_values"] = [v for v in self.allowed_values] + if self.default_value is not None: + body["default_value"] = self.default_value + if self.description is not None: + body["description"] = self.description + if self.hint is not None: + body["hint"] = self.hint + if self.is_copiable is not None: + body["is_copiable"] = self.is_copiable + if self.is_creatable is not None: + body["is_creatable"] = self.is_creatable + if self.is_hidden is not None: + body["is_hidden"] = self.is_hidden + if self.is_loggable is not None: + body["is_loggable"] = self.is_loggable + if self.is_required is not None: + body["is_required"] = self.is_required + if self.is_secret is not None: + body["is_secret"] = self.is_secret + if self.is_updatable is not None: + body["is_updatable"] = self.is_updatable + if self.name is not None: + body["name"] = self.name + if self.oauth_stage is not None: + body["oauth_stage"] = self.oauth_stage.value + if self.type is not None: + body["type"] = self.type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OptionSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allowed_values: + body["allowed_values"] = self.allowed_values + if self.default_value is not None: + body["default_value"] = self.default_value + if self.description is not None: + body["description"] = self.description + if self.hint is not None: + body["hint"] = self.hint + if self.is_copiable is not None: + body["is_copiable"] = self.is_copiable + if self.is_creatable is not None: + body["is_creatable"] = self.is_creatable + if self.is_hidden is not None: + body["is_hidden"] = self.is_hidden + if self.is_loggable is not None: + body["is_loggable"] = self.is_loggable + if self.is_required is not None: + body["is_required"] = self.is_required + if self.is_secret is not None: + body["is_secret"] = self.is_secret + if self.is_updatable is not None: + body["is_updatable"] = self.is_updatable + if self.name is not None: + body["name"] = self.name + if self.oauth_stage is not None: + body["oauth_stage"] = self.oauth_stage + if self.type is not None: + body["type"] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OptionSpec: + """Deserializes the OptionSpec from a dictionary.""" return cls( - continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), - detailed_state=_enum(d, "detailed_state", OnlineTableState), - failed_status=_from_dict(d, "failed_status", FailedStatus), - message=d.get("message", None), - provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), - triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), + allowed_values=d.get("allowed_values", None), + 
default_value=d.get("default_value", None), + description=d.get("description", None), + hint=d.get("hint", None), + is_copiable=d.get("is_copiable", None), + is_creatable=d.get("is_creatable", None), + is_hidden=d.get("is_hidden", None), + is_loggable=d.get("is_loggable", None), + is_required=d.get("is_required", None), + is_secret=d.get("is_secret", None), + is_updatable=d.get("is_updatable", None), + name=d.get("name", None), + oauth_stage=_enum(d, "oauth_stage", OptionSpecOauthStage), + type=_enum(d, "type", OptionSpecOptionType), ) +class OptionSpecOauthStage(Enum): + """During the OAuth flow, specifies which stage the option should be displayed in the UI. + OAUTH_STAGE_UNSPECIFIED is the default value for options unrelated to the OAuth flow. + BEFORE_AUTHORIZATION_CODE corresponds to options necessary to initiate the OAuth process. + BEFORE_ACCESS_TOKEN corresponds to options that are necessary to create a foreign connection, + but that should be displayed after the authorization code has already been received.""" + + BEFORE_ACCESS_TOKEN = "BEFORE_ACCESS_TOKEN" + BEFORE_AUTHORIZATION_CODE = "BEFORE_AUTHORIZATION_CODE" + + +class OptionSpecOptionType(Enum): + """Type of the option, we purposely follow JavaScript types so that the UI can map the options to + JS types. https://www.w3schools.com/js/js_datatypes.asp Enum is a special case that it's just + string with selections.""" + + OPTION_BIGINT = "OPTION_BIGINT" + OPTION_BOOLEAN = "OPTION_BOOLEAN" + OPTION_ENUM = "OPTION_ENUM" + OPTION_MULTILINE_STRING = "OPTION_MULTILINE_STRING" + OPTION_NUMBER = "OPTION_NUMBER" + OPTION_SERVICE_CREDENTIAL = "OPTION_SERVICE_CREDENTIAL" + OPTION_STRING = "OPTION_STRING" + + @dataclass class PermissionsChange: add: Optional[List[Privilege]] = None @@ -6965,6 +8045,9 @@ class PrimaryKeyConstraint: child_columns: List[str] """Column names for this constraint.""" + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" + timeseries_columns: Optional[List[str]] = None """Column names that represent a timeseries.""" @@ -6975,6 +8058,8 @@ def as_dict(self) -> dict: body["child_columns"] = [v for v in self.child_columns] if self.name is not None: body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely if self.timeseries_columns: body["timeseries_columns"] = [v for v in self.timeseries_columns] return body @@ -6986,6 +8071,8 @@ def as_shallow_dict(self) -> dict: body["child_columns"] = self.child_columns if self.name is not None: body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely if self.timeseries_columns: body["timeseries_columns"] = self.timeseries_columns return body @@ -6996,6 +8083,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: return cls( child_columns=d.get("child_columns", None), name=d.get("name", None), + rely=d.get("rely", None), timeseries_columns=d.get("timeseries_columns", None), ) @@ -7056,7 +8144,8 @@ class Privilege(Enum): @dataclass class PrivilegeAssignment: principal: Optional[str] = None - """The principal (user email address or group name).""" + """The principal (user email address or group name). 
For deleted principals, `principal` is empty + while `principal_id` is populated.""" privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -7662,6 +8751,138 @@ def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: ) +class SecurableKind(Enum): + """Latest kind: TABLE_DELTA_ICEBERG_DELTASHARING = 252; Next id:253""" + + TABLE_DB_STORAGE = "TABLE_DB_STORAGE" + TABLE_DELTA = "TABLE_DELTA" + TABLE_DELTASHARING = "TABLE_DELTASHARING" + TABLE_DELTASHARING_MUTABLE = "TABLE_DELTASHARING_MUTABLE" + TABLE_DELTA_EXTERNAL = "TABLE_DELTA_EXTERNAL" + TABLE_DELTA_ICEBERG_DELTASHARING = "TABLE_DELTA_ICEBERG_DELTASHARING" + TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED" + TABLE_DELTA_UNIFORM_HUDI_EXTERNAL = "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL" + TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL = "TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL" + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL = ( + "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL" + ) + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED = ( + "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED" + ) + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE = "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE" + TABLE_EXTERNAL = "TABLE_EXTERNAL" + TABLE_FEATURE_STORE = "TABLE_FEATURE_STORE" + TABLE_FEATURE_STORE_EXTERNAL = "TABLE_FEATURE_STORE_EXTERNAL" + TABLE_FOREIGN_BIGQUERY = "TABLE_FOREIGN_BIGQUERY" + TABLE_FOREIGN_DATABRICKS = "TABLE_FOREIGN_DATABRICKS" + TABLE_FOREIGN_DELTASHARING = "TABLE_FOREIGN_DELTASHARING" + TABLE_FOREIGN_HIVE_METASTORE = "TABLE_FOREIGN_HIVE_METASTORE" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL = ( + "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL" + ) + TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW" + TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_VIEW" + TABLE_FOREIGN_MONGODB = "TABLE_FOREIGN_MONGODB" + TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" + TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" + TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" + TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" + TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" + TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW" + TABLE_FOREIGN_SNOWFLAKE = "TABLE_FOREIGN_SNOWFLAKE" + TABLE_FOREIGN_SQLDW = "TABLE_FOREIGN_SQLDW" + TABLE_FOREIGN_SQLSERVER = "TABLE_FOREIGN_SQLSERVER" + TABLE_FOREIGN_TERADATA = "TABLE_FOREIGN_TERADATA" + TABLE_FOREIGN_WORKDAY_RAAS = "TABLE_FOREIGN_WORKDAY_RAAS" + TABLE_ICEBERG_UNIFORM_MANAGED = 
"TABLE_ICEBERG_UNIFORM_MANAGED" + TABLE_INTERNAL = "TABLE_INTERNAL" + TABLE_MANAGED_POSTGRESQL = "TABLE_MANAGED_POSTGRESQL" + TABLE_MATERIALIZED_VIEW = "TABLE_MATERIALIZED_VIEW" + TABLE_MATERIALIZED_VIEW_DELTASHARING = "TABLE_MATERIALIZED_VIEW_DELTASHARING" + TABLE_METRIC_VIEW = "TABLE_METRIC_VIEW" + TABLE_ONLINE_VECTOR_INDEX_DIRECT = "TABLE_ONLINE_VECTOR_INDEX_DIRECT" + TABLE_ONLINE_VECTOR_INDEX_REPLICA = "TABLE_ONLINE_VECTOR_INDEX_REPLICA" + TABLE_ONLINE_VIEW = "TABLE_ONLINE_VIEW" + TABLE_STANDARD = "TABLE_STANDARD" + TABLE_STREAMING_LIVE_TABLE = "TABLE_STREAMING_LIVE_TABLE" + TABLE_STREAMING_LIVE_TABLE_DELTASHARING = "TABLE_STREAMING_LIVE_TABLE_DELTASHARING" + TABLE_SYSTEM = "TABLE_SYSTEM" + TABLE_SYSTEM_DELTASHARING = "TABLE_SYSTEM_DELTASHARING" + TABLE_VIEW = "TABLE_VIEW" + TABLE_VIEW_DELTASHARING = "TABLE_VIEW_DELTASHARING" + + +@dataclass +class SecurableKindManifest: + """Manifest of a specific securable kind.""" + + assignable_privileges: Optional[List[str]] = None + """Privileges that can be assigned to the securable.""" + + capabilities: Optional[List[str]] = None + """A list of capabilities in the securable kind.""" + + options: Optional[List[OptionSpec]] = None + """Detailed specs of allowed options.""" + + securable_kind: Optional[SecurableKind] = None + """Securable kind to get manifest of.""" + + securable_type: Optional[SecurableType] = None + """Securable Type of the kind.""" + + def as_dict(self) -> dict: + """Serializes the SecurableKindManifest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.assignable_privileges: + body["assignable_privileges"] = [v for v in self.assignable_privileges] + if self.capabilities: + body["capabilities"] = [v for v in self.capabilities] + if self.options: + body["options"] = [v.as_dict() for v in self.options] + if self.securable_kind is not None: + body["securable_kind"] = self.securable_kind.value + if self.securable_type is not None: + body["securable_type"] = self.securable_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SecurableKindManifest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.assignable_privileges: + body["assignable_privileges"] = self.assignable_privileges + if self.capabilities: + body["capabilities"] = self.capabilities + if self.options: + body["options"] = self.options + if self.securable_kind is not None: + body["securable_kind"] = self.securable_kind + if self.securable_type is not None: + body["securable_type"] = self.securable_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SecurableKindManifest: + """Deserializes the SecurableKindManifest from a dictionary.""" + return cls( + assignable_privileges=d.get("assignable_privileges", None), + capabilities=d.get("capabilities", None), + options=_repeated_dict(d, "options", OptionSpec), + securable_kind=_enum(d, "securable_kind", SecurableKind), + securable_type=_enum(d, "securable_type", SecurableType), + ) + + class SecurableType(Enum): """The type of Unity Catalog securable.""" @@ -7681,7 +8902,6 @@ class SecurableType(Enum): STAGING_TABLE = "STAGING_TABLE" STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL" TABLE = "TABLE" - UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE" VOLUME = "VOLUME" @@ -8031,6 +9251,31 @@ def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo: return cls(schema=d.get("schema", None), state=d.get("state", None)) +class SystemType(Enum): + + AMAZON_REDSHIFT = "AMAZON_REDSHIFT" + AZURE_SYNAPSE = "AZURE_SYNAPSE" + 
CONFLUENT = "CONFLUENT" + GOOGLE_BIGQUERY = "GOOGLE_BIGQUERY" + KAFKA = "KAFKA" + LOOKER = "LOOKER" + MICROSOFT_FABRIC = "MICROSOFT_FABRIC" + MICROSOFT_SQL_SERVER = "MICROSOFT_SQL_SERVER" + MONGODB = "MONGODB" + MYSQL = "MYSQL" + ORACLE = "ORACLE" + OTHER = "OTHER" + POSTGRESQL = "POSTGRESQL" + POWER_BI = "POWER_BI" + SALESFORCE = "SALESFORCE" + SAP = "SAP" + SERVICENOW = "SERVICENOW" + SNOWFLAKE = "SNOWFLAKE" + TABLEAU = "TABLEAU" + TERADATA = "TERADATA" + WORKDAY = "WORKDAY" + + @dataclass class TableConstraint: """A table constraint, as defined by *one* of the following fields being set: @@ -8155,7 +9400,6 @@ class TableInfo: """Unique ID of the Data Access Configuration to use with the table data.""" data_source_format: Optional[DataSourceFormat] = None - """Data source format""" deleted_at: Optional[int] = None """Time at which this table was deleted, in epoch milliseconds. Field is omitted if table is not @@ -8169,7 +9413,6 @@ class TableInfo: enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" full_name: Optional[str] = None """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" @@ -8195,6 +9438,9 @@ class TableInfo: schema_name: Optional[str] = None """Name of parent schema relative to its parent catalog.""" + securable_kind_manifest: Optional[SecurableKindManifest] = None + """SecurableKindManifest of table, including capabilities the table has.""" + sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" @@ -8202,7 +9448,7 @@ class TableInfo: """Name of the storage credential, when a storage credential is configured for use with this table.""" storage_location: Optional[str] = None - """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables)""" + """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables).""" table_constraints: Optional[List[TableConstraint]] = None """List of table constraints. 
Note: this field is not set in the output of the __listTables__ API.""" @@ -8275,6 +9521,8 @@ def as_dict(self) -> dict: body["row_filter"] = self.row_filter.as_dict() if self.schema_name is not None: body["schema_name"] = self.schema_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() if self.sql_path is not None: body["sql_path"] = self.sql_path if self.storage_credential_name is not None: @@ -8344,6 +9592,8 @@ def as_shallow_dict(self) -> dict: body["row_filter"] = self.row_filter if self.schema_name is not None: body["schema_name"] = self.schema_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest if self.sql_path is not None: body["sql_path"] = self.sql_path if self.storage_credential_name is not None: @@ -8396,6 +9646,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableInfo: properties=d.get("properties", None), row_filter=_from_dict(d, "row_filter", TableRowFilter), schema_name=d.get("schema_name", None), + securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), sql_path=d.get("sql_path", None), storage_credential_name=d.get("storage_credential_name", None), storage_location=d.get("storage_location", None), @@ -8453,6 +9704,9 @@ class TableSummary: full_name: Optional[str] = None """The full name of the table.""" + securable_kind_manifest: Optional[SecurableKindManifest] = None + """SecurableKindManifest of table, including capabilities the table has.""" + table_type: Optional[TableType] = None def as_dict(self) -> dict: @@ -8460,6 +9714,8 @@ def as_dict(self) -> dict: body = {} if self.full_name is not None: body["full_name"] = self.full_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() if self.table_type is not None: body["table_type"] = self.table_type.value return body @@ -8469,6 +9725,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.full_name is not None: body["full_name"] = self.full_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest if self.table_type is not None: body["table_type"] = self.table_type return body @@ -8476,7 +9734,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableSummary: """Deserializes the TableSummary from a dictionary.""" - return cls(full_name=d.get("full_name", None), table_type=_enum(d, "table_type", TableType)) + return cls( + full_name=d.get("full_name", None), + securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), + table_type=_enum(d, "table_type", TableType), + ) class TableType(Enum): @@ -8487,6 +9749,7 @@ class TableType(Enum): MANAGED = "MANAGED" MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + METRIC_VIEW = "METRIC_VIEW" STREAMING_TABLE = "STREAMING_TABLE" VIEW = "VIEW" @@ -8526,21 +9789,14 @@ def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: @dataclass class TemporaryCredentials: aws_temp_credentials: Optional[AwsCredentials] = None - """AWS temporary credentials for API authentication. Read more at - https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" azure_aad: Optional[AzureActiveDirectoryToken] = None - """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed - Identity. 
Read more at - https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" expiration_time: Optional[int] = None """Server time when the credential will expire, in epoch milliseconds. The API client is advised to cache the credential given this expiration time.""" gcp_oauth_token: Optional[GcpOauthToken] = None - """GCP temporary credentials for API authentication. Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" def as_dict(self) -> dict: """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" @@ -8948,10 +10204,9 @@ class UpdateExternalLocation: """Name of the storage credential used with this location.""" enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" + """Whether to enable file events on this external location.""" encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is @@ -8959,7 +10214,7 @@ class UpdateExternalLocation: sufficient.""" file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" + """File event queue settings.""" force: Optional[bool] = None """Force update even if changing url invalidates dependent external tables or mounts.""" @@ -9517,6 +10772,65 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateRegisteredModelRequest: ) +@dataclass +class UpdateRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the UpdateRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateRequestExternalLineage: + """Deserializes the UpdateRequestExternalLineage from a dictionary.""" + return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) + + @dataclass class 
UpdateResponse: def as_dict(self) -> dict: @@ -9730,12 +11044,11 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: @dataclass class UpdateTableRequest: - """Update a table owner.""" - full_name: Optional[str] = None """Full name of the table.""" owner: Optional[str] = None + """Username of current owner of table.""" def as_dict(self) -> dict: """Serializes the UpdateTableRequest into a dictionary suitable for use as a JSON request body.""" @@ -9939,16 +11252,13 @@ class ValidateCredentialRequest: """Next ID: 17""" aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration""" azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" credential_name: Optional[str] = None """Required. The name of an existing credential or long-lived cloud credential to validate.""" databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" external_location_name: Optional[str] = None """The name of an existing external location to validate. Only applicable for storage credentials @@ -10270,7 +11580,6 @@ class VolumeInfo: """The identifier of the user who created the volume""" encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" full_name: Optional[str] = None """The three-level (fully qualified) name of the volume""" @@ -10299,11 +11608,6 @@ class VolumeInfo: """The unique identifier of the volume""" volume_type: Optional[VolumeType] = None - """The type of the volume. An external volume is located in the specified external location. A - managed volume is located in the default location which is specified by the parent schema, or - the parent catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" def as_dict(self) -> dict: """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body.""" @@ -11404,9 +12708,7 @@ def generate_temporary_service_credential( :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) - The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) - The GCP cloud options to customize the requested temporary credential :returns: :class:`TemporaryCredentials` """ @@ -11596,13 +12898,10 @@ def validate_credential( metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) - The Azure managed identity configuration. :param credential_name: str (optional) Required. The name of an existing credential or long-lived cloud credential to validate. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. :param external_location_name: str (optional) The name of an existing external location to validate. Only applicable for storage credentials (purpose is **STORAGE**.) 
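# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# The ExternalLineageAPI and ExternalMetadataAPI classes introduced in the hunks below
# wrap the /api/2.0/lineage-tracking/external-lineage and .../external-metadata endpoints.
# A minimal sketch, assuming a configured WorkspaceClient that exposes the new services
# as `w.external_metadata` (per the changelog); the object names and field values used
# here are hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ExternalMetadata, SystemType

w = WorkspaceClient()

# Register an external system object in Unity Catalog. `name`, `system_type` and
# `entity_type` are the required fields on ExternalMetadata; everything else is optional.
report = w.external_metadata.create_external_metadata(
    ExternalMetadata(
        name="orders_dashboard",          # hypothetical object name
        system_type=SystemType.POWER_BI,
        entity_type="report",
        description="Finance orders dashboard (illustrative)",
    )
)

# Page through every external metadata object the caller can BROWSE; the generated
# list method drives the page_token loop internally and yields ExternalMetadata objects.
for obj in w.external_metadata.list_external_metadata(page_size=100):
    print(obj.name, obj.system_type)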
@@ -11642,6 +12941,132 @@ def validate_credential( return ValidateCredentialResponse.from_dict(res) +class ExternalLineageAPI: + """External Lineage APIs enable defining and managing lineage relationships between Databricks objects and + external systems. These APIs allow users to capture data flows connecting Databricks tables, models, and + file paths with external metadata objects. + + With these APIs, users can create, update, delete, and list lineage relationships with support for + column-level mappings and custom properties.""" + + def __init__(self, api_client): + self._api = api_client + + def create_external_lineage_relationship( + self, external_lineage_relationship: CreateRequestExternalLineage + ) -> ExternalLineageRelationship: + """Creates an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`CreateRequestExternalLineage` + + :returns: :class:`ExternalLineageRelationship` + """ + body = external_lineage_relationship.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/lineage-tracking/external-lineage", body=body, headers=headers) + return ExternalLineageRelationship.from_dict(res) + + def delete_external_lineage_relationship(self, external_lineage_relationship: DeleteRequestExternalLineage): + """Deletes an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`DeleteRequestExternalLineage` + + + """ + + query = {} + if external_lineage_relationship is not None: + query["external_lineage_relationship"] = external_lineage_relationship.as_dict() + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", "/api/2.0/lineage-tracking/external-lineage", query=query, headers=headers) + + def list_external_lineage_relationships( + self, + object_info: ExternalLineageObject, + lineage_direction: LineageDirection, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[ExternalLineageInfo]: + """Lists external lineage relationships of a Databricks object or external metadata given a supplied + direction. + + :param object_info: :class:`ExternalLineageObject` + The object to query external lineage relationship on. + :param lineage_direction: :class:`LineageDirection` + The lineage direction to filter on. 
+ :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ExternalLineageInfo` + """ + + query = {} + if lineage_direction is not None: + query["lineage_direction"] = lineage_direction.value + if object_info is not None: + query["object_info"] = object_info.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/lineage-tracking/external-lineage", query=query, headers=headers) + if "external_lineage_relationships" in json: + for v in json["external_lineage_relationships"]: + yield ExternalLineageInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_external_lineage_relationship( + self, external_lineage_relationship: UpdateRequestExternalLineage, update_mask: str + ) -> ExternalLineageRelationship: + """Updates an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`UpdateRequestExternalLineage` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`ExternalLineageRelationship` + """ + body = external_lineage_relationship.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/lineage-tracking/external-lineage", query=query, body=body, headers=headers + ) + return ExternalLineageRelationship.from_dict(res) + + class ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog @@ -11684,15 +13109,14 @@ def create( :param comment: str (optional) User-provided free-form text description. :param enable_file_events: bool (optional) - [Create:OPT Update:OPT] Whether to enable file events on this external location. + Whether to enable file events on this external location. :param encryption_details: :class:`EncryptionDetails` (optional) - Encryption options that apply to clients connecting to cloud storage. :param fallback: bool (optional) Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - [Create:OPT Update:OPT] File event queue settings. + File event queue settings. :param read_only: bool (optional) Indicates whether the external location is read-only. 
:param skip_validation: bool (optional) @@ -11847,15 +13271,14 @@ def update( :param credential_name: str (optional) Name of the storage credential used with this location. :param enable_file_events: bool (optional) - [Create:OPT Update:OPT] Whether to enable file events on this external location. + Whether to enable file events on this external location. :param encryption_details: :class:`EncryptionDetails` (optional) - Encryption options that apply to clients connecting to cloud storage. :param fallback: bool (optional) Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - [Create:OPT Update:OPT] File event queue settings. + File event queue settings. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) @@ -11908,6 +13331,137 @@ def update( return ExternalLocationInfo.from_dict(res) +class ExternalMetadataAPI: + """External Metadata objects enable customers to register and manage metadata about external systems within + Unity Catalog. + + These APIs provide a standardized way to create, update, retrieve, list, and delete external metadata + objects. Fine-grained authorization ensures that only users with appropriate permissions can view and + manage external metadata objects.""" + + def __init__(self, api_client): + self._api = api_client + + def create_external_metadata(self, external_metadata: ExternalMetadata) -> ExternalMetadata: + """Creates a new external metadata object in the parent metastore if the caller is a metastore admin or + has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users upon creation + by default. + + :param external_metadata: :class:`ExternalMetadata` + + :returns: :class:`ExternalMetadata` + """ + body = external_metadata.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/lineage-tracking/external-metadata", body=body, headers=headers) + return ExternalMetadata.from_dict(res) + + def delete_external_metadata(self, name: str): + """Deletes the external metadata object that matches the supplied name. The caller must be a metastore + admin, the owner of the external metadata object, or a user that has the **MANAGE** privilege. + + :param name: str + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/lineage-tracking/external-metadata/{name}", headers=headers) + + def get_external_metadata(self, name: str) -> ExternalMetadata: + """Gets the specified external metadata object in a metastore. The caller must be a metastore admin, the + owner of the external metadata object, or a user that has the **BROWSE** privilege. + + :param name: str + + :returns: :class:`ExternalMetadata` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/lineage-tracking/external-metadata/{name}", headers=headers) + return ExternalMetadata.from_dict(res) + + def list_external_metadata( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ExternalMetadata]: + """Gets an array of external metadata objects in the metastore. If the caller is the metastore admin, all + external metadata objects will be retrieved. 
Otherwise, only external metadata objects that the caller + has **BROWSE** on will be retrieved. There is no guarantee of a specific ordering of the elements in + the array. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ExternalMetadata` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/lineage-tracking/external-metadata", query=query, headers=headers) + if "external_metadata" in json: + for v in json["external_metadata"]: + yield ExternalMetadata.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_external_metadata( + self, name: str, external_metadata: ExternalMetadata, update_mask: str + ) -> ExternalMetadata: + """Updates the external metadata object that matches the supplied name. The caller can only update either + the owner or other metadata fields in one request. The caller must be a metastore admin, the owner of + the external metadata object, or a user that has the **MODIFY** privilege. If the caller is updating + the owner, they must also have the **MANAGE** privilege. + + :param name: str + Name of the external metadata object. + :param external_metadata: :class:`ExternalMetadata` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`ExternalMetadata` + """ + body = external_metadata.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/lineage-tracking/external-metadata/{name}", query=query, body=body, headers=headers + ) + return ExternalMetadata.from_dict(res) + + class FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog. @@ -12746,7 +14300,7 @@ def create(self, table: OnlineTable) -> Wait[OnlineTable]: """Create a new Online Table. :param table: :class:`OnlineTable` - Online Table information. + Specification of the online table to be created. :returns: Long-running operation waiter for :class:`OnlineTable`. @@ -14186,8 +15740,6 @@ def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstr :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` - A table constraint, as defined by *one* of the following fields being set: - __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. :returns: :class:`TableConstraint` """ @@ -14307,11 +15859,11 @@ def get( Full name of the table. 
:param include_browse: bool (optional) Whether to include tables in the response for which the principal can only access selective metadata - for + for. :param include_delta_metadata: bool (optional) Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :returns: :class:`TableInfo` """ @@ -14336,7 +15888,6 @@ def list( schema_name: str, *, include_browse: Optional[bool] = None, - include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None, max_results: Optional[int] = None, omit_columns: Optional[bool] = None, @@ -14356,11 +15907,9 @@ def list( Parent schema of tables. :param include_browse: bool (optional) Whether to include tables in the response for which the principal can only access selective metadata - for - :param include_delta_metadata: bool (optional) - Whether delta metadata should be included in the response. + for. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :param max_results: int (optional) Maximum number of tables to return. If not set, all the tables are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of this value and a server @@ -14384,8 +15933,6 @@ def list( query["catalog_name"] = catalog_name if include_browse is not None: query["include_browse"] = include_browse - if include_delta_metadata is not None: - query["include_delta_metadata"] = include_delta_metadata if include_manifest_capabilities is not None: query["include_manifest_capabilities"] = include_manifest_capabilities if max_results is not None: @@ -14404,8 +15951,6 @@ def list( "Accept": "application/json", } - if "max_results" not in query: - query["max_results"] = 0 while True: json = self._api.do("GET", "/api/2.1/unity-catalog/tables", query=query, headers=headers) if "tables" in json: @@ -14439,7 +15984,7 @@ def list_summaries( :param catalog_name: str Name of parent catalog for tables of interest. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :param max_results: int (optional) Maximum number of summaries for tables to return. If not set, the page length is set to a server configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is @@ -14473,8 +16018,6 @@ def list_summaries( "Accept": "application/json", } - if "max_results" not in query: - query["max_results"] = 0 while True: json = self._api.do("GET", "/api/2.1/unity-catalog/table-summaries", query=query, headers=headers) if "tables" in json: @@ -14493,6 +16036,7 @@ def update(self, full_name: str, *, owner: Optional[str] = None): :param full_name: str Full name of the table. :param owner: str (optional) + Username of current owner of table. """ @@ -14598,11 +16142,6 @@ def create( :param name: str The name of the volume :param volume_type: :class:`VolumeType` - The type of the volume. An external volume is located in the specified external location. A managed - volume is located in the default location which is specified by the parent schema, or the parent - catalog, or the Metastore. 
[Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 8dd2c1359..6c8b9525a 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -851,7 +851,6 @@ class CleanRoomRemoteDetail: 2. Its invite_recipient_email is empty.""" compliance_security_profile: Optional[ComplianceSecurityProfile] = None - """The compliance security profile used to process regulated data following compliance standards.""" creator: Optional[CleanRoomCollaborator] = None """Collaborator who creates the clean room.""" @@ -1063,24 +1062,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteCleanRoomAssetResponse: return cls() -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - @dataclass class ListCleanRoomAssetsResponse: assets: Optional[List[CleanRoomAsset]] = None @@ -1230,7 +1211,6 @@ def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` - Metadata of the clean room asset :returns: :class:`CleanRoomAsset` """ @@ -1332,7 +1312,8 @@ def update( For notebooks, the name is the notebook file name. :param asset: :class:`CleanRoomAsset` - Metadata of the clean room asset + The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to + update. :returns: :class:`CleanRoomAsset` """ @@ -1403,7 +1384,7 @@ def list( class CleanRoomsAPI: """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to - each other’s data.""" + each other's data.""" def __init__(self, api_client): self._api = api_client diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 5092314a0..def14aa81 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -168,9 +168,6 @@ class AwsAttributes: """Attributes set during cluster creation which are related to Amazon Web Services.""" availability: Optional[AwsAvailability] = None - """Availability type used for all subsequent nodes past the `first_on_demand` ones. - - Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" ebs_volume_count: Optional[int] = None """The number of volumes launched for each instance. Users can choose up to 10 volumes. 
This @@ -593,7 +590,6 @@ class ClusterAccessControlRequest: """name of the group""" permission_level: Optional[ClusterPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -742,30 +738,6 @@ class ClusterAttributes: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" @@ -809,19 +781,6 @@ class ClusterAttributes: `spark_conf`, and `num_workers`""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. 
- - [simple form]: https://docs.databricks.com/compute/simple-form.html""" node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes @@ -882,7 +841,6 @@ class ClusterAttributes: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the ClusterAttributes into a dictionary suitable for use as a JSON request body.""" @@ -1158,30 +1116,6 @@ class ClusterDetails: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" default_tags: Optional[Dict[str, str]] = None """Tags that are added by Databricks regardless of any `custom_tags`, including: @@ -1249,19 +1183,6 @@ class ClusterDetails: on this port in executor nodes.""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. 
- - [simple form]: https://docs.databricks.com/compute/simple-form.html""" last_restarted_time: Optional[int] = None """the timestamp that the cluster was started/restarted""" @@ -1369,7 +1290,6 @@ class ClusterDetails: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the ClusterDetails into a dictionary suitable for use as a JSON request body.""" @@ -1786,7 +1706,6 @@ class ClusterPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[ClusterPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ClusterPermission into a dictionary suitable for use as a JSON request body.""" @@ -1873,7 +1792,6 @@ class ClusterPermissionsDescription: description: Optional[str] = None permission_level: Optional[ClusterPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ClusterPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -1942,7 +1860,6 @@ class ClusterPolicyAccessControlRequest: """name of the group""" permission_level: Optional[ClusterPolicyPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -2053,7 +1970,6 @@ class ClusterPolicyPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[ClusterPolicyPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ClusterPolicyPermission into a dictionary suitable for use as a JSON request body.""" @@ -2138,7 +2054,6 @@ class ClusterPolicyPermissionsDescription: description: Optional[str] = None permission_level: Optional[ClusterPolicyPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ClusterPolicyPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -2351,30 +2266,6 @@ class ClusterSpec: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. 
* - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" @@ -2418,19 +2309,6 @@ class ClusterSpec: `spark_conf`, and `num_workers`""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html""" node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes @@ -2505,7 +2383,6 @@ class ClusterSpec: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body.""" @@ -2873,30 +2750,6 @@ class CreateCluster: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. 
* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" @@ -2940,19 +2793,6 @@ class CreateCluster: `spark_conf`, and `num_workers`""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html""" node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes @@ -3023,7 +2863,6 @@ class CreateCluster: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body.""" @@ -4043,12 +3882,8 @@ class DiskType: """Describes the disk type.""" azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None - """All Azure Disk types that Databricks supports. See - https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None - """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for - details.""" def as_dict(self) -> dict: """Serializes the DiskType into a dictionary suitable for use as a JSON request body.""" @@ -4218,30 +4053,6 @@ class EditCluster: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. 
- - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" @@ -4285,19 +4096,6 @@ class EditCluster: `spark_conf`, and `num_workers`""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html""" node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes @@ -4368,7 +4166,6 @@ class EditCluster: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the EditCluster into a dictionary suitable for use as a JSON request body.""" @@ -6425,7 +6222,6 @@ class InstancePoolAccessControlRequest: """name of the group""" permission_level: Optional[InstancePoolPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -6854,8 +6650,6 @@ class InstancePoolGcpAttributes: """Attributes set during instance pool creation which are related to GCP.""" gcp_availability: Optional[GcpAvailability] = None - """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or - preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" local_ssd_count: Optional[int] = None """If provided, each node in the instance pool will have this number of local SSDs attached. 
Each @@ -6917,7 +6711,6 @@ class InstancePoolPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[InstancePoolPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the InstancePoolPermission into a dictionary suitable for use as a JSON request body.""" @@ -7003,7 +6796,6 @@ class InstancePoolPermissionsDescription: description: Optional[str] = None permission_level: Optional[InstancePoolPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the InstancePoolPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -9306,6 +9098,7 @@ class TerminationReasonCode(Enum): SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" @@ -9536,30 +9329,6 @@ class UpdateClusterResource: tags""" data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" @@ -9603,19 +9372,6 @@ class UpdateClusterResource: `spark_conf`, and `num_workers`""" kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. 
* [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html""" node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes @@ -9690,7 +9446,6 @@ class UpdateClusterResource: `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" def as_dict(self) -> dict: """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body.""" @@ -10502,30 +10257,6 @@ def create( - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) - Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. * - `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias - for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple - users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: - A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. - Most programming languages, cluster features and data governance features are available in this - mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are - fully isolated so that they cannot see each other's data and credentials. Most data governance - features are supported in this mode. But programming languages and cluster features might be - limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency - clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on - standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC - nor passthrough enabled. :param docker_image: :class:`DockerImage` (optional) Custom docker image BYOC :param driver_instance_pool_id: str (optional) @@ -10559,19 +10290,6 @@ def create( When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) - The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. 
* [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -10630,7 +10348,6 @@ def create( `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) - Cluster Attributes showing for clusters workload types. :returns: Long-running operation waiter for :class:`ClusterDetails`. @@ -10907,30 +10624,6 @@ def edit( - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) - Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. * - `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias - for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple - users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: - A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. - Most programming languages, cluster features and data governance features are available in this - mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are - fully isolated so that they cannot see each other's data and credentials. Most data governance - features are supported in this mode. But programming languages and cluster features might be - limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency - clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on - standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC - nor passthrough enabled. :param docker_image: :class:`DockerImage` (optional) Custom docker image BYOC :param driver_instance_pool_id: str (optional) @@ -10964,19 +10657,6 @@ def edit( When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) - The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. 
* [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -11035,7 +10715,6 @@ def edit( `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) - Cluster Attributes showing for clusters workload types. :returns: Long-running operation waiter for :class:`ClusterDetails`. diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 44cb76800..1276734bb 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -253,42 +253,6 @@ class DashboardView(Enum): DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" -@dataclass -class DeleteScheduleResponse: - def as_dict(self) -> dict: - """Serializes the DeleteScheduleResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScheduleResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteScheduleResponse: - """Deserializes the DeleteScheduleResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteSubscriptionResponse: - def as_dict(self) -> dict: - """Serializes the DeleteSubscriptionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteSubscriptionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteSubscriptionResponse: - """Deserializes the DeleteSubscriptionResponse from a dictionary.""" - return cls() - - @dataclass class GenieAttachment: """Genie AI Response""" @@ -409,6 +373,46 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversation: ) +@dataclass +class GenieConversationSummary: + conversation_id: str + + title: str + + created_timestamp: int + + def as_dict(self) -> dict: + """Serializes the GenieConversationSummary into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.title is not None: + body["title"] = self.title + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieConversationSummary into a shallow dictionary of its immediate attributes.""" + body = {} + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.title is not None: + body["title"] = self.title + return body + + 
@classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: + """Deserializes the GenieConversationSummary from a dictionary.""" + return cls( + conversation_id=d.get("conversation_id", None), + created_timestamp=d.get("created_timestamp", None), + title=d.get("title", None), + ) + + @dataclass class GenieCreateConversationMessageRequest: content: str @@ -453,80 +457,64 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieCreateConversationMessageRequest: @dataclass -class GenieGenerateDownloadFullQueryResultResponse: - download_id: Optional[str] = None - """Download ID. Use this ID to track the download request in subsequent polling calls""" - - def as_dict(self) -> dict: - """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.download_id is not None: - body["download_id"] = self.download_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.download_id is not None: - body["download_id"] = self.download_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: - """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" - return cls(download_id=d.get("download_id", None)) - - -@dataclass -class GenieGetDownloadFullQueryResultResponse: +class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None """SQL Statement Execution response. See [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) for more details.""" def as_dict(self) -> dict: - """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.statement_response: body["statement_response"] = self.statement_response.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.statement_response: body["statement_response"] = self.statement_response return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: - """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GenieGetMessageQueryResultResponse: + """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) @dataclass -class GenieGetMessageQueryResultResponse: - statement_response: Optional[sql.StatementResponse] = None - """SQL Statement Execution response. 
See [Get status, manifest, and result first - chunk](:method:statementexecution/getstatement) for more details.""" +class GenieListConversationsResponse: + conversations: Optional[List[GenieConversationSummary]] = None + """List of conversations in the Genie space""" + + next_page_token: Optional[str] = None + """Token to get the next page of results""" def as_dict(self) -> dict: - """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenieListConversationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response.as_dict() + if self.conversations: + body["conversations"] = [v.as_dict() for v in self.conversations] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the GenieListConversationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response + if self.conversations: + body["conversations"] = self.conversations + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieGetMessageQueryResultResponse: - """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" - return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) + def from_dict(cls, d: Dict[str, Any]) -> GenieListConversationsResponse: + """Deserializes the GenieListConversationsResponse from a dictionary.""" + return cls( + conversations=_repeated_dict(d, "conversations", GenieConversationSummary), + next_page_token=d.get("next_page_token", None), + ) @dataclass @@ -595,20 +583,6 @@ class GenieMessage: `query_result_metadata` in `GenieQueryAttachment` instead.""" status: Optional[MessageStatus] = None - """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data - sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * - `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`: - Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing a - generated SQL query. Get the SQL query result by calling - [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * - `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`: - Message processing is completed. Results are in the `attachments` field. Get the SQL query - result by calling - [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. * - `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available - anymore. The user needs to rerun the query. Rerun the SQL query result by calling - [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: - Message has been cancelled.""" user_id: Optional[int] = None """ID of the user who created the message""" @@ -1801,6 +1775,23 @@ def create_message_and_wait( timeout=timeout ) + def delete_conversation(self, space_id: str, conversation_id: str): + """Delete a conversation. 
+ + :param space_id: str + The ID associated with the Genie space where the conversation is located. + :param conversation_id: str + The ID of the conversation to delete. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}", headers=headers) + def execute_message_attachment_query( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: @@ -1856,75 +1847,6 @@ def execute_message_query( ) return GenieGetMessageQueryResultResponse.from_dict(res) - def generate_download_full_query_result( - self, space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGenerateDownloadFullQueryResultResponse: - """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of - the download. The query result is stored in an external link and can be retrieved using the [Get - Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. - See [Execute Statement](:method:statementexecution/executestatement) for more details. - - :param space_id: str - Genie space ID - :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - :param attachment_id: str - Attachment ID - - :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads", - headers=headers, - ) - return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) - - def get_download_full_query_result( - self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str - ) -> GenieGetDownloadFullQueryResultResponse: - """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and - successfully receiving a `download_id`, use this API to poll the download progress. When the download - is complete, the API returns one or more external links to the query result files. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. - You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` - disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute - Statement](:method:statementexecution/executestatement) for more details. - - :param space_id: str - Genie space ID - :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - :param attachment_id: str - Attachment ID - :param download_id: str - Download ID. This ID is provided by the [Generate Download - endpoint](:method:genie/generateDownloadFullQueryResult) - - :returns: :class:`GenieGetDownloadFullQueryResultResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}", - headers=headers, - ) - return GenieGetDownloadFullQueryResultResponse.from_dict(res) - def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: """Get message from conversation. 
@@ -2050,6 +1972,33 @@ def get_space(self, space_id: str) -> GenieSpace: res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", headers=headers) return GenieSpace.from_dict(res) + def list_conversations( + self, space_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> GenieListConversationsResponse: + """Get a list of conversations in a Genie Space. + + :param space_id: str + The ID of the Genie space to retrieve conversations from. + :param page_size: int (optional) + Maximum number of conversations to return per page + :param page_token: str (optional) + Token to get the next page of results + + :returns: :class:`GenieListConversationsResponse` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}/conversations", query=query, headers=headers) + return GenieListConversationsResponse.from_dict(res) + def list_spaces( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> GenieListSpacesResponse: @@ -2109,6 +2058,21 @@ def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: def start_conversation_and_wait(self, space_id: str, content: str, timeout=timedelta(minutes=20)) -> GenieMessage: return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) + def trash_space(self, space_id: str): + """Move a Genie Space to the trash. + + :param space_id: str + The ID associated with the Genie space to be sent to the trash. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can @@ -2139,6 +2103,7 @@ def create_schedule(self, dashboard_id: str, schedule: Schedule) -> Schedule: :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` + The schedule to create. A dashboard is limited to 10 schedules. :returns: :class:`Schedule` """ @@ -2159,6 +2124,7 @@ def create_subscription(self, dashboard_id: str, schedule_id: str, subscription: :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription: :class:`Subscription` + The subscription to create. A schedule is limited to 100 subscriptions. :returns: :class:`Subscription` """ @@ -2558,6 +2524,7 @@ def update_schedule(self, dashboard_id: str, schedule_id: str, schedule: Schedul :param schedule_id: str UUID identifying the schedule. :param schedule: :class:`Schedule` + The schedule to update. :returns: :class:`Schedule` """ diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index 186447595..9dd59f0ac 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -113,24 +113,67 @@ class DatabaseInstance: capacity: Optional[str] = None """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" + child_instance_refs: Optional[List[DatabaseInstanceRef]] = None + """The refs of the child instances. 
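A short sketch of the new Genie conversation-management calls, assuming the service is reachable as `w.genie`; the space ID below is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    space_id = "01ef0000000000000000000000000000"  # placeholder Genie space ID

    # list_conversations() returns one page at a time; follow next_page_token manually.
    page_token = None
    while True:
        resp = w.genie.list_conversations(space_id, page_size=20, page_token=page_token)
        for conv in resp.conversations or []:
            print(conv.conversation_id, conv.created_timestamp, conv.title)
        if not resp.next_page_token:
            break
        page_token = resp.next_page_token

    # Destructive calls, shown here only as comments:
    # w.genie.delete_conversation(space_id, conversation_id="...")  # drop one conversation
    # w.genie.trash_space(space_id)                                 # move the whole space to the trash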
This is only available if the instance is parent instance.""" + creation_time: Optional[str] = None """The timestamp when the instance was created.""" creator: Optional[str] = None """The email of the creator of the instance.""" + effective_enable_readable_secondaries: Optional[bool] = None + """xref AIP-129. `enable_readable_secondaries` is owned by the client, while + `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries` + will only be set in Create/Update response messages if and only if the user provides the field + via the request. `effective_enable_readable_secondaries` on the other hand will always bet set + in all response messages (Create/Update/Get/List).""" + + effective_node_count: Optional[int] = None + """xref AIP-129. `node_count` is owned by the client, while `effective_node_count` is owned by the + server. `node_count` will only be set in Create/Update response messages if and only if the user + provides the field via the request. `effective_node_count` on the other hand will always bet set + in all response messages (Create/Update/Get/List).""" + + effective_retention_window_in_days: Optional[int] = None + """xref AIP-129. `retention_window_in_days` is owned by the client, while + `effective_retention_window_in_days` is owned by the server. `retention_window_in_days` will + only be set in Create/Update response messages if and only if the user provides the field via + the request. `effective_retention_window_in_days` on the other hand will always bet set in all + response messages (Create/Update/Get/List).""" + effective_stopped: Optional[bool] = None """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the server. `stopped` will only be set in Create/Update response messages if and only if the user provides the field via the request. `effective_stopped` on the other hand will always bet set in all response messages (Create/Update/Get/List).""" + enable_readable_secondaries: Optional[bool] = None + """Whether to enable secondaries to serve read-only traffic. Defaults to false.""" + + node_count: Optional[int] = None + """The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults + to 1 primary and 0 secondaries.""" + + parent_instance_ref: Optional[DatabaseInstanceRef] = None + """The ref of the parent instance. This is only available if the instance is child instance. Input: + For specifying the parent instance to create a child instance. Optional. Output: Only populated + if provided as input to create a child instance.""" + pg_version: Optional[str] = None """The version of Postgres running on the instance.""" + read_only_dns: Optional[str] = None + """The DNS endpoint to connect to the instance for read only access. This is only available if + enable_readable_secondaries is true.""" + read_write_dns: Optional[str] = None """The DNS endpoint to connect to the instance for read+write access.""" + retention_window_in_days: Optional[int] = None + """The retention window for the instance. This is the time window in days for which the historical + data is retained. The default value is 7 days. 
Valid values are 2 to 35 days.""" + state: Optional[DatabaseInstanceState] = None """The current state of the instance.""" @@ -145,18 +188,36 @@ def as_dict(self) -> dict: body = {} if self.capacity is not None: body["capacity"] = self.capacity + if self.child_instance_refs: + body["child_instance_refs"] = [v.as_dict() for v in self.child_instance_refs] if self.creation_time is not None: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_enable_readable_secondaries is not None: + body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries + if self.effective_node_count is not None: + body["effective_node_count"] = self.effective_node_count + if self.effective_retention_window_in_days is not None: + body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.enable_readable_secondaries is not None: + body["enable_readable_secondaries"] = self.enable_readable_secondaries if self.name is not None: body["name"] = self.name + if self.node_count is not None: + body["node_count"] = self.node_count + if self.parent_instance_ref: + body["parent_instance_ref"] = self.parent_instance_ref.as_dict() if self.pg_version is not None: body["pg_version"] = self.pg_version + if self.read_only_dns is not None: + body["read_only_dns"] = self.read_only_dns if self.read_write_dns is not None: body["read_write_dns"] = self.read_write_dns + if self.retention_window_in_days is not None: + body["retention_window_in_days"] = self.retention_window_in_days if self.state is not None: body["state"] = self.state.value if self.stopped is not None: @@ -170,18 +231,36 @@ def as_shallow_dict(self) -> dict: body = {} if self.capacity is not None: body["capacity"] = self.capacity + if self.child_instance_refs: + body["child_instance_refs"] = self.child_instance_refs if self.creation_time is not None: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_enable_readable_secondaries is not None: + body["effective_enable_readable_secondaries"] = self.effective_enable_readable_secondaries + if self.effective_node_count is not None: + body["effective_node_count"] = self.effective_node_count + if self.effective_retention_window_in_days is not None: + body["effective_retention_window_in_days"] = self.effective_retention_window_in_days if self.effective_stopped is not None: body["effective_stopped"] = self.effective_stopped + if self.enable_readable_secondaries is not None: + body["enable_readable_secondaries"] = self.enable_readable_secondaries if self.name is not None: body["name"] = self.name + if self.node_count is not None: + body["node_count"] = self.node_count + if self.parent_instance_ref: + body["parent_instance_ref"] = self.parent_instance_ref if self.pg_version is not None: body["pg_version"] = self.pg_version + if self.read_only_dns is not None: + body["read_only_dns"] = self.read_only_dns if self.read_write_dns is not None: body["read_write_dns"] = self.read_write_dns + if self.retention_window_in_days is not None: + body["retention_window_in_days"] = self.retention_window_in_days if self.state is not None: body["state"] = self.state if self.stopped is not None: @@ -195,18 +274,216 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: """Deserializes the DatabaseInstance from a dictionary.""" return cls( 
capacity=d.get("capacity", None), + child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef), creation_time=d.get("creation_time", None), creator=d.get("creator", None), + effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None), + effective_node_count=d.get("effective_node_count", None), + effective_retention_window_in_days=d.get("effective_retention_window_in_days", None), effective_stopped=d.get("effective_stopped", None), + enable_readable_secondaries=d.get("enable_readable_secondaries", None), name=d.get("name", None), + node_count=d.get("node_count", None), + parent_instance_ref=_from_dict(d, "parent_instance_ref", DatabaseInstanceRef), pg_version=d.get("pg_version", None), + read_only_dns=d.get("read_only_dns", None), read_write_dns=d.get("read_write_dns", None), + retention_window_in_days=d.get("retention_window_in_days", None), state=_enum(d, "state", DatabaseInstanceState), stopped=d.get("stopped", None), uid=d.get("uid", None), ) + +@dataclass +class DatabaseInstanceRef: + """DatabaseInstanceRef is a reference to a database instance. It is used in the DatabaseInstance + object to refer to the parent instance of an instance and to refer to the child instances of an + instance. To specify as a parent instance during creation of an instance, the lsn and + branch_time fields are optional. If not specified, the child instance will be created from the + latest lsn of the parent. If both lsn and branch_time are specified, the lsn will be used to + create the child instance.""" + + branch_time: Optional[str] = None + """Branch time of the ref database instance. For a parent ref instance, this is the point in time + on the parent instance from which the instance was created. For a child ref instance, this is + the point in time on the instance from which the child instance was created. Input: For + specifying the point in time to create a child instance. Optional. Output: Only populated if + provided as input to create a child instance.""" + + effective_lsn: Optional[str] = None + """xref AIP-129. `lsn` is owned by the client, while `effective_lsn` is owned by the server. `lsn` + will only be set in Create/Update response messages if and only if the user provides the field + via the request. `effective_lsn` on the other hand will always be set in all response messages + (Create/Update/Get/List). For a parent ref instance, this is the LSN on the parent instance from + which the instance was created. For a child ref instance, this is the LSN on the instance from + which the child instance was created.""" + + lsn: Optional[str] = None + """User-specified WAL LSN of the ref database instance. + + Input: For specifying the WAL LSN to create a child instance. Optional.
Output: Only populated + if provided as input to create a child instance.""" + + name: Optional[str] = None + """Name of the ref database instance.""" + + uid: Optional[str] = None + """Id of the ref database instance.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseInstanceRef into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.branch_time is not None: + body["branch_time"] = self.branch_time + if self.effective_lsn is not None: + body["effective_lsn"] = self.effective_lsn + if self.lsn is not None: + body["lsn"] = self.lsn + if self.name is not None: + body["name"] = self.name + if self.uid is not None: + body["uid"] = self.uid + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseInstanceRef into a shallow dictionary of its immediate attributes.""" + body = {} + if self.branch_time is not None: + body["branch_time"] = self.branch_time + if self.effective_lsn is not None: + body["effective_lsn"] = self.effective_lsn + if self.lsn is not None: + body["lsn"] = self.lsn + if self.name is not None: + body["name"] = self.name + if self.uid is not None: + body["uid"] = self.uid + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef: + """Deserializes the DatabaseInstanceRef from a dictionary.""" + return cls( + branch_time=d.get("branch_time", None), + effective_lsn=d.get("effective_lsn", None), + lsn=d.get("lsn", None), + name=d.get("name", None), + uid=d.get("uid", None), + ) + + +@dataclass +class DatabaseInstanceRole: + """A DatabaseInstanceRole represents a Postgres role in a database instance.""" + + attributes: Optional[DatabaseInstanceRoleAttributes] = None + """API-exposed Postgres role attributes""" + + identity_type: Optional[DatabaseInstanceRoleIdentityType] = None + """The type of the role.""" + + membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None + """An enum value for a standard role that this role is a member of.""" + + name: Optional[str] = None + """The name of the role. 
This is the unique identifier for the role in an instance.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.attributes: + body["attributes"] = self.attributes.as_dict() + if self.identity_type is not None: + body["identity_type"] = self.identity_type.value + if self.membership_role is not None: + body["membership_role"] = self.membership_role.value + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseInstanceRole into a shallow dictionary of its immediate attributes.""" + body = {} + if self.attributes: + body["attributes"] = self.attributes + if self.identity_type is not None: + body["identity_type"] = self.identity_type + if self.membership_role is not None: + body["membership_role"] = self.membership_role + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole: + """Deserializes the DatabaseInstanceRole from a dictionary.""" + return cls( + attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes), + identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType), + membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole), + name=d.get("name", None), + ) + + +@dataclass +class DatabaseInstanceRoleAttributes: + """Attributes that can be granted to a Postgres role. We are only implementing a subset for now, + see xref: https://www.postgresql.org/docs/16/sql-createrole.html The values follow Postgres + keyword naming e.g. CREATEDB, BYPASSRLS, etc. which is why they don't include typical + underscores between words. We were requested to make this a nested object/struct representation + since these are knobs from an external spec.""" + + bypassrls: Optional[bool] = None + + createdb: Optional[bool] = None + + createrole: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the DatabaseInstanceRoleAttributes into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bypassrls is not None: + body["bypassrls"] = self.bypassrls + if self.createdb is not None: + body["createdb"] = self.createdb + if self.createrole is not None: + body["createrole"] = self.createrole + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseInstanceRoleAttributes into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bypassrls is not None: + body["bypassrls"] = self.bypassrls + if self.createdb is not None: + body["createdb"] = self.createdb + if self.createrole is not None: + body["createrole"] = self.createrole + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRoleAttributes: + """Deserializes the DatabaseInstanceRoleAttributes from a dictionary.""" + return cls( + bypassrls=d.get("bypassrls", None), createdb=d.get("createdb", None), createrole=d.get("createrole", None) + ) + + +class DatabaseInstanceRoleIdentityType(Enum): + + GROUP = "GROUP" + PG_ONLY = "PG_ONLY" + SERVICE_PRINCIPAL = "SERVICE_PRINCIPAL" + USER = "USER" + + +class DatabaseInstanceRoleMembershipRole(Enum): + """Roles that the DatabaseInstanceRole can be a member of.""" + + DATABRICKS_SUPERUSER = "DATABRICKS_SUPERUSER" + + class DatabaseInstanceState(Enum): AVAILABLE = "AVAILABLE" @@ -231,18 +508,15 @@ class DatabaseTable: MUST match that of the registered catalog (or the request will be 
rejected).""" logical_database_name: Optional[str] = None - """Target Postgres database object (logical database) name for this table. This field is optional - in all scenarios. + """Target Postgres database object (logical database) name for this table. When creating a table in a registered Postgres catalog, the target Postgres database name is inferred to be that of the registered catalog. If this field is specified in this scenario, the Postgres database name MUST match that of the registered catalog (or the request will be rejected). - When creating a table in a standard catalog, the target database name is inferred to be that of - the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary - postgres database. Note that this has implications for the `create_database_objects_is_missing` - field in `spec`.""" + When creating a table in a standard catalog, this field is required. In this scenario, + specifying this field will allow targeting an arbitrary postgres database.""" def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" @@ -277,81 +551,49 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: @dataclass -class DeleteDatabaseCatalogResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse: - """Deserializes the DeleteDatabaseCatalogResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteDatabaseInstanceResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse: - """Deserializes the DeleteDatabaseInstanceResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteDatabaseTableResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body +class DeltaTableSyncInfo: + delta_commit_timestamp: Optional[str] = None + """The timestamp when the above Delta version was committed in the source Delta table. 
Note: This + is the Delta commit time, not the time the data was written to the synced table.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse: - """Deserializes the DeleteDatabaseTableResponse from a dictionary.""" - return cls() + delta_commit_version: Optional[int] = None + """The Delta Lake commit version that was last successfully synced.""" - -@dataclass -class DeleteSyncedDatabaseTableResponse: def as_dict(self) -> dict: - """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeltaTableSyncInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.delta_commit_timestamp is not None: + body["delta_commit_timestamp"] = self.delta_commit_timestamp + if self.delta_commit_version is not None: + body["delta_commit_version"] = self.delta_commit_version return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DeltaTableSyncInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.delta_commit_timestamp is not None: + body["delta_commit_timestamp"] = self.delta_commit_timestamp + if self.delta_commit_version is not None: + body["delta_commit_version"] = self.delta_commit_version return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse: - """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: + """Deserializes the DeltaTableSyncInfo from a dictionary.""" + return cls( + delta_commit_timestamp=d.get("delta_commit_timestamp", None), + delta_commit_version=d.get("delta_commit_version", None), + ) @dataclass class GenerateDatabaseCredentialRequest: """Generates a credential that can be used to access database instances""" + claims: Optional[List[RequestedClaims]] = None + """The returned token will be scoped to the union of instance_names and instances containing the + specified UC tables, so instance_names is allowed to be empty.""" + instance_names: Optional[List[str]] = None """Instances to which the token will be scoped.""" @@ -360,6 +602,8 @@ class GenerateDatabaseCredentialRequest: def as_dict(self) -> dict: """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} + if self.claims: + body["claims"] = [v.as_dict() for v in self.claims] if self.instance_names: body["instance_names"] = [v for v in self.instance_names] if self.request_id is not None: @@ -369,6 +613,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} + if self.claims: + body["claims"] = self.claims if self.instance_names: body["instance_names"] = self.instance_names if self.request_id is not None: @@ -378,7 +624,46 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest: """Deserializes the GenerateDatabaseCredentialRequest from a dictionary.""" - return cls(instance_names=d.get("instance_names", None), request_id=d.get("request_id", None)) + return cls( + claims=_repeated_dict(d, "claims", RequestedClaims), + instance_names=d.get("instance_names", None), + request_id=d.get("request_id", None), + ) + + +@dataclass +class 
ListDatabaseInstanceRolesResponse: + database_instance_roles: Optional[List[DatabaseInstanceRole]] = None + """List of database instance roles.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of instances.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseInstanceRolesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_instance_roles: + body["database_instance_roles"] = [v.as_dict() for v in self.database_instance_roles] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseInstanceRolesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_instance_roles: + body["database_instance_roles"] = self.database_instance_roles + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstanceRolesResponse: + """Deserializes the ListDatabaseInstanceRolesResponse from a dictionary.""" + return cls( + database_instance_roles=_repeated_dict(d, "database_instance_roles", DatabaseInstanceRole), + next_page_token=d.get("next_page_token", None), + ) @dataclass @@ -463,6 +748,77 @@ class ProvisioningInfoState(Enum): UPDATING = "UPDATING" +@dataclass +class RequestedClaims: + permission_set: Optional[RequestedClaimsPermissionSet] = None + + resources: Optional[List[RequestedResource]] = None + + def as_dict(self) -> dict: + """Serializes the RequestedClaims into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission_set is not None: + body["permission_set"] = self.permission_set.value + if self.resources: + body["resources"] = [v.as_dict() for v in self.resources] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RequestedClaims into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_set is not None: + body["permission_set"] = self.permission_set + if self.resources: + body["resources"] = self.resources + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RequestedClaims: + """Deserializes the RequestedClaims from a dictionary.""" + return cls( + permission_set=_enum(d, "permission_set", RequestedClaimsPermissionSet), + resources=_repeated_dict(d, "resources", RequestedResource), + ) + + +class RequestedClaimsPermissionSet(Enum): + """Might add WRITE in the future""" + + READ_ONLY = "READ_ONLY" + + +@dataclass +class RequestedResource: + table_name: Optional[str] = None + + unspecified_resource_name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the RequestedResource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.table_name is not None: + body["table_name"] = self.table_name + if self.unspecified_resource_name is not None: + body["unspecified_resource_name"] = self.unspecified_resource_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RequestedResource into a shallow dictionary of its immediate attributes.""" + body = {} + if self.table_name is not None: + body["table_name"] = self.table_name + if self.unspecified_resource_name is not None: + body["unspecified_resource_name"] = self.unspecified_resource_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RequestedResource: + """Deserializes the 
RequestedResource from a dictionary.""" + return cls( + table_name=d.get("table_name", None), unspecified_resource_name=d.get("unspecified_resource_name", None) + ) + + @dataclass class SyncedDatabaseTable: """Next field marker: 12""" @@ -481,20 +837,18 @@ class SyncedDatabaseTable: rejected).""" logical_database_name: Optional[str] = None - """Target Postgres database object (logical database) name for this table. This field is optional - in all scenarios. + """Target Postgres database object (logical database) name for this table. When creating a synced table in a registered Postgres catalog, the target Postgres database name is inferred to be that of the registered catalog. If this field is specified in this scenario, the Postgres database name MUST match that of the registered catalog (or the request will be rejected). - When creating a synced table in a standard catalog, the target database name is inferred to be - that of the standard catalog. In this scenario, specifying this field will allow targeting an - arbitrary postgres database.""" + When creating a synced table in a standard catalog, this field is required. In this scenario, + specifying this field will allow targeting an arbitrary postgres database. Note that this has + implications for the `create_database_objects_is_missing` field in `spec`.""" spec: Optional[SyncedTableSpec] = None - """Specification of a synced database table.""" unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the @@ -557,12 +911,11 @@ class SyncedTableContinuousUpdateStatus: """Progress of the initial data synchronization.""" last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the synced table. Note that this Delta - version may not be completely synced to the synced table yet.""" + """The last source table Delta version that was successfully synced to the synced table.""" timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the synced - table.""" + """The end timestamp of the last time any data was synchronized from the source table to the synced + table. This is when the data is available in the synced table.""" def as_dict(self) -> dict: """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body.""" @@ -602,12 +955,12 @@ class SyncedTableFailedStatus: SYNCED_PIPELINE_FAILED state.""" last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the synced table. Note that this Delta - version may only be partially synced to the synced table. Only populated if the table is still - synced and available for serving.""" + """The last source table Delta version that was successfully synced to the synced table. Only + populated if the table is still synced and available for serving.""" timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the synced + """The end timestamp of the last time any data was synchronized from the source table to the synced
Only populated if the table is still synced and available for serving.""" def as_dict(self) -> dict: @@ -699,6 +1052,51 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress: ) +@dataclass +class SyncedTablePosition: + delta_table_sync_info: Optional[DeltaTableSyncInfo] = None + + sync_end_timestamp: Optional[str] = None + """The end timestamp of the most recent successful synchronization. This is the time when the data + is available in the synced table.""" + + sync_start_timestamp: Optional[str] = None + """The starting timestamp of the most recent successful synchronization from the source table to + the destination (synced) table. Note this is the starting timestamp of the sync operation, not + the end time. E.g., for a batch, this is the time when the sync operation started.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTablePosition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.delta_table_sync_info: + body["delta_table_sync_info"] = self.delta_table_sync_info.as_dict() + if self.sync_end_timestamp is not None: + body["sync_end_timestamp"] = self.sync_end_timestamp + if self.sync_start_timestamp is not None: + body["sync_start_timestamp"] = self.sync_start_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTablePosition into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_table_sync_info: + body["delta_table_sync_info"] = self.delta_table_sync_info + if self.sync_end_timestamp is not None: + body["sync_end_timestamp"] = self.sync_end_timestamp + if self.sync_start_timestamp is not None: + body["sync_start_timestamp"] = self.sync_start_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePosition: + """Deserializes the SyncedTablePosition from a dictionary.""" + return cls( + delta_table_sync_info=_from_dict(d, "delta_table_sync_info", DeltaTableSyncInfo), + sync_end_timestamp=d.get("sync_end_timestamp", None), + sync_start_timestamp=d.get("sync_start_timestamp", None), + ) + + @dataclass class SyncedTableProvisioningStatus: """Detailed status of a synced table. Shown if the synced table is in the @@ -839,15 +1237,24 @@ class SyncedTableStatus: """Status of a synced table.""" continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None - """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE - or the SYNCED_UPDATING_PIPELINE_RESOURCES state.""" detailed_state: Optional[SyncedTableState] = None """The state of the synced table.""" failed_status: Optional[SyncedTableFailedStatus] = None - """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the - SYNCED_PIPELINE_FAILED state.""" + + last_sync: Optional[SyncedTablePosition] = None + """Summary of the last successful synchronization from source to destination. + + Will always be present if there has been a successful sync. Even if the most recent syncs have + failed. + + Limitation: The only exception is if the synced table is doing a FULL REFRESH, then the last + sync information will not be available until the full refresh is complete. This limitation will + be addressed in a future version. 
+ + This top-level field is a convenience for consumers who want easy access to last sync + information without having to traverse detailed_status.""" message: Optional[str] = None """A text description of the current state of the synced table.""" @@ -857,12 +1264,8 @@ class SyncedTableStatus: of bin packing), or generated by the server (when creating a new pipeline).""" provisioning_status: Optional[SyncedTableProvisioningStatus] = None - """Detailed status of a synced table. Shown if the synced table is in the - PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None - """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE - or the SYNCED_NO_PENDING_UPDATE state.""" def as_dict(self) -> dict: """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body.""" @@ -873,6 +1276,8 @@ def as_dict(self) -> dict: body["detailed_state"] = self.detailed_state.value if self.failed_status: body["failed_status"] = self.failed_status.as_dict() + if self.last_sync: + body["last_sync"] = self.last_sync.as_dict() if self.message is not None: body["message"] = self.message if self.pipeline_id is not None: @@ -892,6 +1297,8 @@ def as_shallow_dict(self) -> dict: body["detailed_state"] = self.detailed_state if self.failed_status: body["failed_status"] = self.failed_status + if self.last_sync: + body["last_sync"] = self.last_sync if self.message is not None: body["message"] = self.message if self.pipeline_id is not None: @@ -909,6 +1316,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus: continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus), detailed_state=_enum(d, "detailed_state", SyncedTableState), failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus), + last_sync=_from_dict(d, "last_sync", SyncedTablePosition), message=d.get("message", None), pipeline_id=d.get("pipeline_id", None), provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus), @@ -922,12 +1330,11 @@ class SyncedTableTriggeredUpdateStatus: or the SYNCED_NO_PENDING_UPDATE state.""" last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the synced table. Note that this Delta - version may not be completely synced to the synced table yet.""" + """The last source table Delta version that was successfully synced to the synced table.""" timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the synced - table.""" + """The end timestamp of the last time any data was synchronized from the source table to the synced + table. This is when the data is available in the synced table.""" triggered_update_progress: Optional[SyncedTablePipelineProgress] = None """Progress of the active data synchronization pipeline.""" @@ -990,7 +1397,7 @@ def create_database_instance(self, database_instance: DatabaseInstance) -> Datab """Create a Database Instance. :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + Instance to create. 
:returns: :class:`DatabaseInstance` """ @@ -1003,11 +1410,30 @@ def create_database_instance(self, database_instance: DatabaseInstance) -> Datab res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers) return DatabaseInstance.from_dict(res) + def create_database_instance_role( + self, instance_name: str, database_instance_role: DatabaseInstanceRole + ) -> DatabaseInstanceRole: + """Create a role for a Database Instance. + + :param instance_name: str + :param database_instance_role: :class:`DatabaseInstanceRole` + + :returns: :class:`DatabaseInstanceRole` + """ + body = database_instance_role.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers) + return DatabaseInstanceRole.from_dict(res) + def create_database_table(self, table: DatabaseTable) -> DatabaseTable: - """Create a Database Table. + """Create a Database Table. Useful for registering pre-existing PG tables in UC. See + CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC. :param table: :class:`DatabaseTable` - Next field marker: 13 :returns: :class:`DatabaseTable` """ @@ -1024,7 +1450,6 @@ def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> Syn """Create a Synced Database Table. :param synced_table: :class:`SyncedDatabaseTable` - Next field marker: 12 :returns: :class:`SyncedDatabaseTable` """ @@ -1081,6 +1506,38 @@ def delete_database_instance(self, name: str, *, force: Optional[bool] = None, p self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers) + def delete_database_instance_role( + self, + instance_name: str, + name: str, + *, + allow_missing: Optional[bool] = None, + reassign_owned_to: Optional[str] = None, + ): + """Deletes a role for a Database Instance. + + :param instance_name: str + :param name: str + :param allow_missing: bool (optional) + This is the AIP standard name for the equivalent of Postgres' `IF EXISTS` option + :param reassign_owned_to: str (optional) + + + """ + + query = {} + if allow_missing is not None: + query["allow_missing"] = allow_missing + if reassign_owned_to is not None: + query["reassign_owned_to"] = reassign_owned_to + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/database/instances/{instance_name}/roles/{name}", query=query, headers=headers + ) + def delete_database_table(self, name: str): """Delete a Database Table. @@ -1129,10 +1586,17 @@ def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> Databas return DatabaseInstance.from_dict(res) def generate_database_credential( - self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None + self, + *, + claims: Optional[List[RequestedClaims]] = None, + instance_names: Optional[List[str]] = None, + request_id: Optional[str] = None, ) -> DatabaseCredential: """Generates a credential that can be used to access database instances. + :param claims: List[:class:`RequestedClaims`] (optional) + The returned token will be scoped to the union of instance_names and instances containing the + specified UC tables, so instance_names is allowed to be empty. :param instance_names: List[str] (optional) Instances to which the token will be scoped. 
:param request_id: str (optional) @@ -1140,6 +1604,8 @@ def generate_database_credential( :returns: :class:`DatabaseCredential` """ body = {} + if claims is not None: + body["claims"] = [v.as_dict() for v in claims] if instance_names is not None: body["instance_names"] = [v for v in instance_names] if request_id is not None: @@ -1183,6 +1649,22 @@ def get_database_instance(self, name: str) -> DatabaseInstance: res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers) return DatabaseInstance.from_dict(res) + def get_database_instance_role(self, instance_name: str, name: str) -> DatabaseInstanceRole: + """Gets a role for a Database Instance. + + :param instance_name: str + :param name: str + + :returns: :class:`DatabaseInstanceRole` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/instances/{instance_name}/roles/{name}", headers=headers) + return DatabaseInstanceRole.from_dict(res) + def get_database_table(self, name: str) -> DatabaseTable: """Get a Database Table. @@ -1213,6 +1695,40 @@ def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers) return SyncedDatabaseTable.from_dict(res) + def list_database_instance_roles( + self, instance_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseInstanceRole]: + """START OF PG ROLE APIs Section + + :param instance_name: str + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Instances. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseInstanceRole` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", f"/api/2.0/database/instances/{instance_name}/roles", query=query, headers=headers + ) + if "database_instance_roles" in json: + for v in json["database_instance_roles"]: + yield DatabaseInstanceRole.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + def list_database_instances( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DatabaseInstance]: @@ -1252,7 +1768,6 @@ def update_database_instance( :param name: str The name of the instance. This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. :param update_mask: str The list of fields to update. 
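A minimal usage sketch of the new Database Instance role and scoped-credential APIs added in this file. The instance name, role name, and table name below are placeholders, and the `w.database` accessor is assumed from the SDK's usual `WorkspaceClient` service wiring; only the method and dataclass names that appear in this diff are relied on.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import (
    DatabaseInstanceRole,
    DatabaseInstanceRoleAttributes,
    RequestedClaims,
    RequestedClaimsPermissionSet,
    RequestedResource,
)

w = WorkspaceClient()

# Create a Postgres role on an existing instance ("my-instance" and "reporting_role" are placeholders).
role = w.database.create_database_instance_role(
    instance_name="my-instance",
    database_instance_role=DatabaseInstanceRole(
        name="reporting_role",
        attributes=DatabaseInstanceRoleAttributes(bypassrls=False, createdb=False, createrole=False),
    ),
)

# Roles are returned page by page; the iterator follows next_page_token automatically.
for r in w.database.list_database_instance_roles(instance_name="my-instance"):
    print(r.name, r.identity_type)

# Generate a credential scoped via the new `claims` field; per the docstring,
# instance_names may be empty when the claims identify the UC tables to cover.
cred = w.database.generate_database_credential(
    claims=[
        RequestedClaims(
            permission_set=RequestedClaimsPermissionSet.READ_ONLY,
            resources=[RequestedResource(table_name="main.reporting.orders")],
        )
    ]
)

# Roles can be deleted idempotently; allow_missing maps to Postgres' IF EXISTS behavior.
w.database.delete_database_instance_role("my-instance", "reporting_role", allow_missing=True)
```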
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 45208bbcd..a25767c7a 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -21,7 +21,6 @@ class AccessControlRequest: """name of the group""" permission_level: Optional[PermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -1220,7 +1219,6 @@ class Permission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[PermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the Permission into a dictionary suitable for use as a JSON request body.""" @@ -1387,7 +1385,6 @@ class PermissionsDescription: description: Optional[str] = None permission_level: Optional[PermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the PermissionsDescription into a dictionary suitable for use as a JSON request body.""" diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 5a75f3a00..5b6c4442b 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -219,11 +219,6 @@ class BaseRun: """The URL to the detail page of the run.""" run_type: Optional[RunType] = None - """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * - `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit - run. A run created with :method:jobs/submit. - - [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" schedule: Optional[CronSchedule] = None """The cron schedule that triggered this run if it was triggered by the periodic scheduler.""" @@ -244,7 +239,6 @@ class BaseRun: """Deprecated. Please use the `status` field instead.""" status: Optional[RunStatus] = None - """The current status of the run""" tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call @@ -253,20 +247,8 @@ class BaseRun: root to determine if more results are available.""" trigger: Optional[TriggerType] = None - """The type of trigger that fired this run. - - * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: - One time triggers that fire a single run. This occurs you triggered a single run on demand - through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a - previously failed run. This occurs when you request to re-run the job in case of failures. * - `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: - Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is - triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update. * - `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job run. 
- * `MODEL`: Indicates a run that is triggered by a model update.""" trigger_info: Optional[TriggerInfo] = None - """Additional details about what triggered the run""" def as_dict(self) -> dict: """Serializes the BaseRun into a dictionary suitable for use as a JSON request body.""" @@ -1005,7 +987,6 @@ class CreateJob: are used, `git_source` must be defined on the job.""" health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. @@ -1044,10 +1025,6 @@ class CreateJob: """The queue settings of the job.""" run_as: Optional[JobRunAs] = None - """Write-only setting. Specifies the user or service principal that the job runs as. If not - specified, the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -2558,8 +2535,6 @@ class GitSource: with git_branch or git_tag.""" git_snapshot: Optional[GitSnapshot] = None - """Read-only state of the remote repository at the time the job was run. This field is only - included on job runs.""" git_tag: Optional[str] = None """Name of the tag to be checked out and used by this job. This field cannot be specified in @@ -2732,7 +2707,6 @@ class JobAccessControlRequest: """name of the group""" permission_level: Optional[JobPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -3055,9 +3029,6 @@ class JobEnvironment: """The key of an environment. It has to be unique within a job.""" spec: Optional[compute.Environment] = None - """The environment entity used to preserve serverless environment side panel, jobs' environment for - non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal - environment spec, only pip dependencies are supported.""" def as_dict(self) -> dict: """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body.""" @@ -3198,7 +3169,6 @@ class JobPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[JobPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the JobPermission into a dictionary suitable for use as a JSON request body.""" @@ -3286,7 +3256,6 @@ class JobPermissionsDescription: description: Optional[str] = None permission_level: Optional[JobPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the JobPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -3436,7 +3405,6 @@ class JobSettings: are used, `git_source` must be defined on the job.""" health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. @@ -3475,10 +3443,6 @@ class JobSettings: """The queue settings of the job.""" run_as: Optional[JobRunAs] = None - """Write-only setting. Specifies the user or service principal that the job runs as. If not - specified, the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. 
If not, an error is thrown.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -3737,18 +3701,8 @@ class JobsHealthOperator(Enum): @dataclass class JobsHealthRule: metric: JobsHealthMetric - """Specifies the health metric that is being evaluated for a particular health rule. - - * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: - An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric - is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag - across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate - of the maximum consumer delay across all streams. This metric is in Public Preview. * - `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all - streams. This metric is in Public Preview.""" op: JobsHealthOperator - """Specifies the operator used to compare the health metric value with the specified threshold.""" value: int """Specifies the threshold value that the health metric should obey to satisfy the health rule.""" @@ -4454,11 +4408,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PythonWheelTask: @dataclass class QueueDetails: code: Optional[QueueDetailsCodeCode] = None - """The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to - reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was - queued due to reaching the per-job limit of concurrent job runs. * - `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of - active run job tasks.""" message: Optional[str] = None """A descriptive message with the queuing details. This field is unstructured, and its exact format @@ -4549,7 +4498,6 @@ class RepairHistoryItem: """Deprecated. Please use the `status` field instead.""" status: Optional[RunStatus] = None - """The current status of the run""" task_run_ids: Optional[List[int]] = None """The run IDs of the task runs that ran as part of this repair history item.""" @@ -5293,11 +5241,6 @@ class Run: """The URL to the detail page of the run.""" run_type: Optional[RunType] = None - """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * - `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit - run. A run created with :method:jobs/submit. - - [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" schedule: Optional[CronSchedule] = None """The cron schedule that triggered this run if it was triggered by the periodic scheduler.""" @@ -5318,7 +5261,6 @@ class Run: """Deprecated. Please use the `status` field instead.""" status: Optional[RunStatus] = None - """The current status of the run""" tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call @@ -5327,20 +5269,8 @@ class Run: root to determine if more results are available.""" trigger: Optional[TriggerType] = None - """The type of trigger that fired this run. - - * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: - One time triggers that fire a single run. This occurs you triggered a single run on demand - through the UI or the API. 
* `RETRY`: Indicates a run that is triggered as a retry of a - previously failed run. This occurs when you request to re-run the job in case of failures. * - `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: - Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is - triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update. * - `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job run. - * `MODEL`: Indicates a run that is triggered by a model update.""" trigger_info: Optional[TriggerInfo] = None - """Additional details about what triggered the run""" def as_dict(self) -> dict: """Serializes the Run into a dictionary suitable for use as a JSON request body.""" @@ -6484,7 +6414,6 @@ class RunStatus: """If the run was queued, details about the reason for queuing the run.""" state: Optional[RunLifecycleStateV2State] = None - """The current state of the run.""" termination_details: Optional[TerminationDetails] = None """If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating the @@ -6721,7 +6650,6 @@ class RunTask: """Deprecated. Please use the `status` field instead.""" status: Optional[RunStatus] = None - """The current status of the run""" timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" @@ -7156,10 +7084,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SparkSubmitTask: @dataclass class SqlAlertOutput: alert_state: Optional[SqlAlertState] = None - """The state of the SQL alert. - - * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger - conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions""" output_link: Optional[str] = None """The link to find the output results.""" @@ -7792,7 +7716,6 @@ class SubmitRun: are used, `git_source` must be defined on the job.""" health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" idempotency_token: Optional[str] = None """An optional token that can be used to guarantee the idempotency of job run requests. If a run @@ -8001,7 +7924,6 @@ class SubmitTask: gen_ai_compute_task: Optional[GenAiComputeTask] = None health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty @@ -8430,7 +8352,6 @@ class Task: gen_ai_compute_task: Optional[GenAiComputeTask] = None health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" job_cluster_key: Optional[str] = None """If job_cluster_key, this task is executed reusing the cluster specified in @@ -8942,55 +8863,12 @@ class TerminationCodeCode(Enum): @dataclass class TerminationDetails: code: Optional[TerminationCodeCode] = None - """The code indicates why the run was terminated. Additional codes might be introduced in future - releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run - was completed successfully but some child runs failed. * `USER_CANCELED`: The run was - successfully canceled during execution by a user. * `CANCELED`: The run was canceled during - execution by the Databricks platform; for example, if the maximum run duration was exceeded. 
* - `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency - type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The - run encountered an unexpected error. Refer to the state message for further details. * - `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * - `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further - details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when - communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because - it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The - workspace has reached the quota for the maximum number of concurrent active runs. Consider - scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it - tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The - number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. - Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run - failed due to an error when accessing the customer blob storage. Refer to the state message for - further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more - details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to - a permission issue while accessing a resource. Refer to the state message for further details. * - `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer - to the state message for further details. The causes might include, but are not limited to: The - provided library is invalid, there are insufficient permissions to install the library, and so - forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum - concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a - cluster that has already reached the maximum number of contexts it is configured to create. See: - [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to - the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an - invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The - run failed due to a cloud provider issue. Refer to the state message for further details. * - `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size - limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. - - [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" message: Optional[str] = None """A descriptive message with the termination details. This field is unstructured and the format might change.""" type: Optional[TerminationTypeType] = None - """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the - Databricks platform. Please look at the [status page] or contact support if the issue persists. - * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job - configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud - provider. 
- - [status page]: https://status.databricks.com/""" def as_dict(self) -> dict: """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body.""" @@ -9592,7 +9470,6 @@ def create( Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) - An optional set of health rules that can be defined for this job. :param job_clusters: List[:class:`JobCluster`] (optional) A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. @@ -9622,10 +9499,6 @@ def create( :param queue: :class:`QueueSettings` (optional) The queue settings of the job. :param run_as: :class:`JobRunAs` (optional) - Write-only setting. Specifies the user or service principal that the job runs as. If not specified, - the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -10520,7 +10393,6 @@ def submit( Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) - An optional set of health rules that can be defined for this job. :param idempotency_token: str (optional) An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 939bf93f1..961480be7 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -2282,7 +2282,6 @@ class ListingSummary: share: Optional[ShareInfo] = None status: Optional[ListingStatus] = None - """Enums""" subtitle: Optional[str] = None @@ -2462,7 +2461,6 @@ class PersonalizationRequest: comment: Optional[str] = None contact_info: Optional[ContactInfo] = None - """contact info for the consumer requesting data or performing a listing installation""" created_at: Optional[int] = None diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index c8acbfd6d..3d22a2ad0 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -21,30 +21,18 @@ @dataclass class Activity: - """Activity recorded for the action.""" + """For activities, this contains the activity recorded for the action. For comments, this contains + the comment details. For transition requests, this contains the transition request details.""" activity_type: Optional[ActivityType] = None - """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage - transition. - - * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. - - * `CANCELLED_REQUEST`: User cancelled an existing transition request. - - * `APPROVED_REQUEST`: User approved the corresponding stage transition. - - * `REJECTED_REQUEST`: User rejected the coressponding stage transition. 
- - * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model - versions in a stage.""" comment: Optional[str] = None - """User-provided comment associated with the activity.""" + """User-provided comment associated with the activity, comment, or transition request.""" creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - from_stage: Optional[Stage] = None + from_stage: Optional[str] = None """Source stage of the transition (if the activity is stage transition related). Valid values are: * `None`: The initial stage of a model version. @@ -66,7 +54,7 @@ class Activity: usually describes a side effect, such as a version being archived as part of another version's stage transition, and may not be returned for some activity types.""" - to_stage: Optional[Stage] = None + to_stage: Optional[str] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: * `None`: The initial stage of a model version. @@ -90,7 +78,7 @@ def as_dict(self) -> dict: if self.creation_timestamp is not None: body["creation_timestamp"] = self.creation_timestamp if self.from_stage is not None: - body["from_stage"] = self.from_stage.value + body["from_stage"] = self.from_stage if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -98,7 +86,7 @@ def as_dict(self) -> dict: if self.system_comment is not None: body["system_comment"] = self.system_comment if self.to_stage is not None: - body["to_stage"] = self.to_stage.value + body["to_stage"] = self.to_stage if self.user_id is not None: body["user_id"] = self.user_id return body @@ -133,25 +121,32 @@ def from_dict(cls, d: Dict[str, Any]) -> Activity: activity_type=_enum(d, "activity_type", ActivityType), comment=d.get("comment", None), creation_timestamp=d.get("creation_timestamp", None), - from_stage=_enum(d, "from_stage", Stage), + from_stage=d.get("from_stage", None), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), system_comment=d.get("system_comment", None), - to_stage=_enum(d, "to_stage", Stage), + to_stage=d.get("to_stage", None), user_id=d.get("user_id", None), ) class ActivityAction(Enum): - """An action that a user (with sufficient permissions) could take on an activity. Valid values are: - * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + """An action that a user (with sufficient permissions) could take on an activity or comment. + + For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request * `REJECT_TRANSITION_REQUEST`: Reject a transition request - * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request""" + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + + For comments, valid values are: * `EDIT_COMMENT`: Edit the comment + + * `DELETE_COMMENT`: Delete the comment""" APPROVE_TRANSITION_REQUEST = "APPROVE_TRANSITION_REQUEST" CANCEL_TRANSITION_REQUEST = "CANCEL_TRANSITION_REQUEST" + DELETE_COMMENT = "DELETE_COMMENT" + EDIT_COMMENT = "EDIT_COMMENT" REJECT_TRANSITION_REQUEST = "REJECT_TRANSITION_REQUEST" @@ -181,13 +176,15 @@ class ActivityType(Enum): @dataclass class ApproveTransitionRequest: + """Details required to identify and approve a model version stage transition request.""" + name: str """Name of the model.""" version: str """Version of the model.""" - stage: Stage + stage: str """Target stage of the transition. 
Valid values are: * `None`: The initial stage of a model version. @@ -214,7 +211,7 @@ def as_dict(self) -> dict: if self.name is not None: body["name"] = self.name if self.stage is not None: - body["stage"] = self.stage.value + body["stage"] = self.stage if self.version is not None: body["version"] = self.version return body @@ -241,7 +238,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: archive_existing_versions=d.get("archive_existing_versions", None), comment=d.get("comment", None), name=d.get("name", None), - stage=_enum(d, "stage", Stage), + stage=d.get("stage", None), version=d.get("version", None), ) @@ -249,7 +246,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: @dataclass class ApproveTransitionRequestResponse: activity: Optional[Activity] = None - """Activity recorded for the action.""" + """New activity generated as a result of this operation.""" def as_dict(self) -> dict: """Serializes the ApproveTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" @@ -272,30 +269,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequestResponse: class CommentActivityAction(Enum): - """An action that a user (with sufficient permissions) could take on a comment. Valid values are: * - `EDIT_COMMENT`: Edit the comment + """An action that a user (with sufficient permissions) could take on an activity or comment. + + For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + + * `REJECT_TRANSITION_REQUEST`: Reject a transition request + + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + + For comments, valid values are: * `EDIT_COMMENT`: Edit the comment * `DELETE_COMMENT`: Delete the comment""" + APPROVE_TRANSITION_REQUEST = "APPROVE_TRANSITION_REQUEST" + CANCEL_TRANSITION_REQUEST = "CANCEL_TRANSITION_REQUEST" DELETE_COMMENT = "DELETE_COMMENT" EDIT_COMMENT = "EDIT_COMMENT" + REJECT_TRANSITION_REQUEST = "REJECT_TRANSITION_REQUEST" @dataclass class CommentObject: - """Comment details.""" + """For activities, this contains the activity recorded for the action. For comments, this contains + the comment details. 
For transition requests, this contains the transition request details.""" available_actions: Optional[List[CommentActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" comment: Optional[str] = None - """User-provided comment on the action.""" + """User-provided comment associated with the activity, comment, or transition request.""" creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" id: Optional[str] = None - """Comment ID""" + """Unique identifier for the object.""" last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" @@ -352,6 +360,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CommentObject: @dataclass class CreateComment: + """Details required to create a comment on a model version.""" + name: str """Name of the model.""" @@ -392,7 +402,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateComment: @dataclass class CreateCommentResponse: comment: Optional[CommentObject] = None - """Comment details.""" + """New comment object""" def as_dict(self) -> dict: """Serializes the CreateCommentResponse into a dictionary suitable for use as a JSON request body.""" @@ -935,6 +945,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: @dataclass class CreateRegistryWebhook: + """Details required to create a registry webhook.""" + events: List[RegistryWebhookEvent] """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. @@ -969,8 +981,10 @@ class CreateRegistryWebhook: """User-specified description for the webhook.""" http_url_spec: Optional[HttpUrlSpec] = None + """External HTTPS URL called on event trigger (by using a POST request).""" job_spec: Optional[JobSpec] = None + """ID of the job that the webhook runs.""" model_name: Optional[str] = None """If model name is not specified, a registry-wide webhook is created that listens for the @@ -1119,13 +1133,15 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRunResponse: @dataclass class CreateTransitionRequest: + """Details required to create a model version stage transition request.""" + name: str """Name of the model.""" version: str """Version of the model.""" - stage: Stage + stage: str """Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. 
@@ -1147,7 +1163,7 @@ def as_dict(self) -> dict: if self.name is not None: body["name"] = self.name if self.stage is not None: - body["stage"] = self.stage.value + body["stage"] = self.stage if self.version is not None: body["version"] = self.version return body @@ -1171,7 +1187,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest: return cls( comment=d.get("comment", None), name=d.get("name", None), - stage=_enum(d, "stage", Stage), + stage=d.get("stage", None), version=d.get("version", None), ) @@ -1179,7 +1195,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest: @dataclass class CreateTransitionRequestResponse: request: Optional[TransitionRequest] = None - """Transition request details.""" + """New activity generated for stage transition request.""" def as_dict(self) -> dict: """Serializes the CreateTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" @@ -1502,24 +1518,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionTagResponse: return cls() -@dataclass -class DeleteOnlineStoreResponse: - def as_dict(self) -> dict: - """Serializes the DeleteOnlineStoreResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteOnlineStoreResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteOnlineStoreResponse: - """Deserializes the DeleteOnlineStoreResponse from a dictionary.""" - return cls() - - @dataclass class DeleteRun: run_id: str @@ -1685,28 +1683,27 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteTagResponse: @dataclass class DeleteTransitionRequestResponse: + activity: Optional[Activity] = None + """New activity generated as a result of this operation.""" + def as_dict(self) -> dict: """Serializes the DeleteTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.activity: + body["activity"] = self.activity.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the DeleteTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.activity: + body["activity"] = self.activity return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteTransitionRequestResponse: """Deserializes the DeleteTransitionRequestResponse from a dictionary.""" - return cls() - - -class DeleteTransitionRequestStage(Enum): - - ARCHIVED = "Archived" - NONE = "None" - PRODUCTION = "Production" - STAGING = "Staging" + return cls(activity=_from_dict(d, "activity", Activity)) @dataclass @@ -1811,7 +1808,6 @@ class ExperimentAccessControlRequest: """name of the group""" permission_level: Optional[ExperimentPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -1922,7 +1918,6 @@ class ExperimentPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[ExperimentPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ExperimentPermission into a dictionary suitable for use as a JSON request body.""" @@ -2009,7 +2004,6 @@ class ExperimentPermissionsDescription: description: Optional[str] = None permission_level: Optional[ExperimentPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ExperimentPermissionsDescription into a 
dictionary suitable for use as a JSON request body.""" @@ -2106,6 +2100,241 @@ def from_dict(cls, d: Dict[str, Any]) -> ExperimentTag: return cls(key=d.get("key", None), value=d.get("value", None)) +@dataclass +class Feature: + """Feature for model version.""" + + feature_name: Optional[str] = None + """Feature name""" + + feature_table_id: Optional[str] = None + """Feature table id""" + + feature_table_name: Optional[str] = None + """Feature table name""" + + def as_dict(self) -> dict: + """Serializes the Feature into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Feature into a shallow dictionary of its immediate attributes.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.feature_table_id is not None: + body["feature_table_id"] = self.feature_table_id + if self.feature_table_name is not None: + body["feature_table_name"] = self.feature_table_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Feature: + """Deserializes the Feature from a dictionary.""" + return cls( + feature_name=d.get("feature_name", None), + feature_table_id=d.get("feature_table_id", None), + feature_table_name=d.get("feature_table_name", None), + ) + + +@dataclass +class FeatureLineage: + feature_specs: Optional[List[FeatureLineageFeatureSpec]] = None + """List of feature specs that contain this feature.""" + + models: Optional[List[FeatureLineageModel]] = None + """List of Unity Catalog models that were trained on this feature.""" + + online_features: Optional[List[FeatureLineageOnlineFeature]] = None + """List of online features that use this feature as source.""" + + def as_dict(self) -> dict: + """Serializes the FeatureLineage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.feature_specs: + body["feature_specs"] = [v.as_dict() for v in self.feature_specs] + if self.models: + body["models"] = [v.as_dict() for v in self.models] + if self.online_features: + body["online_features"] = [v.as_dict() for v in self.online_features] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FeatureLineage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.feature_specs: + body["feature_specs"] = self.feature_specs + if self.models: + body["models"] = self.models + if self.online_features: + body["online_features"] = self.online_features + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureLineage: + """Deserializes the FeatureLineage from a dictionary.""" + return cls( + feature_specs=_repeated_dict(d, "feature_specs", FeatureLineageFeatureSpec), + models=_repeated_dict(d, "models", FeatureLineageModel), + online_features=_repeated_dict(d, "online_features", FeatureLineageOnlineFeature), + ) + + +@dataclass +class FeatureLineageFeatureSpec: + name: Optional[str] = None + """The full name of the feature spec in Unity Catalog.""" + + def as_dict(self) -> dict: + """Serializes the FeatureLineageFeatureSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: 
+ """Serializes the FeatureLineageFeatureSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureLineageFeatureSpec: + """Deserializes the FeatureLineageFeatureSpec from a dictionary.""" + return cls(name=d.get("name", None)) + + +@dataclass +class FeatureLineageModel: + name: Optional[str] = None + """The full name of the model in Unity Catalog.""" + + version: Optional[int] = None + """The version of the model.""" + + def as_dict(self) -> dict: + """Serializes the FeatureLineageModel into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FeatureLineageModel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureLineageModel: + """Deserializes the FeatureLineageModel from a dictionary.""" + return cls(name=d.get("name", None), version=d.get("version", None)) + + +@dataclass +class FeatureLineageOnlineFeature: + feature_name: Optional[str] = None + """The name of the online feature (column name).""" + + table_name: Optional[str] = None + """The full name of the online table in Unity Catalog.""" + + def as_dict(self) -> dict: + """Serializes the FeatureLineageOnlineFeature into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.table_name is not None: + body["table_name"] = self.table_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FeatureLineageOnlineFeature into a shallow dictionary of its immediate attributes.""" + body = {} + if self.feature_name is not None: + body["feature_name"] = self.feature_name + if self.table_name is not None: + body["table_name"] = self.table_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureLineageOnlineFeature: + """Deserializes the FeatureLineageOnlineFeature from a dictionary.""" + return cls(feature_name=d.get("feature_name", None), table_name=d.get("table_name", None)) + + +@dataclass +class FeatureList: + """Feature list wrap all the features for a model version""" + + features: Optional[List[Feature]] = None + + def as_dict(self) -> dict: + """Serializes the FeatureList into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.features: + body["features"] = [v.as_dict() for v in self.features] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FeatureList into a shallow dictionary of its immediate attributes.""" + body = {} + if self.features: + body["features"] = self.features + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureList: + """Deserializes the FeatureList from a dictionary.""" + return cls(features=_repeated_dict(d, "features", Feature)) + + +@dataclass +class FeatureTag: + """Represents a tag on a feature in a feature table.""" + + key: str + + value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the FeatureTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is 
not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FeatureTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FeatureTag: + """Deserializes the FeatureTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) + + @dataclass class FileInfo: """Metadata of a single artifact file or directory.""" @@ -2753,9 +2982,8 @@ class JobSpecWithoutSecret: """ID of the job that the webhook runs.""" workspace_url: Optional[str] = None - """URL of the workspace containing the job that this webhook runs. Defaults to the workspace URL in - which the webhook is created. If not specified, the job’s workspace is assumed to be the same - as the webhook’s.""" + """URL of the workspace containing the job that this webhook runs. If not specified, the job’s + workspace URL is assumed to be the same as the workspace where the webhook is created.""" def as_dict(self) -> dict: """Serializes the JobSpecWithoutSecret into a dictionary suitable for use as a JSON request body.""" @@ -2859,6 +3087,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListExperimentsResponse: ) +@dataclass +class ListFeatureTagsResponse: + """Response message for ListFeatureTag.""" + + feature_tags: Optional[List[FeatureTag]] = None + + next_page_token: Optional[str] = None + """Pagination token to request the next page of results for this query.""" + + def as_dict(self) -> dict: + """Serializes the ListFeatureTagsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.feature_tags: + body["feature_tags"] = [v.as_dict() for v in self.feature_tags] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListFeatureTagsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.feature_tags: + body["feature_tags"] = self.feature_tags + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListFeatureTagsResponse: + """Deserializes the ListFeatureTagsResponse from a dictionary.""" + return cls( + feature_tags=_repeated_dict(d, "feature_tags", FeatureTag), next_page_token=d.get("next_page_token", None) + ) + + @dataclass class ListModelsResponse: next_page_token: Optional[str] = None @@ -3892,7 +4155,7 @@ class ModelDatabricks: """Unique identifier for the object.""" last_updated_timestamp: Optional[int] = None - """Time of the object at last update, as a Unix timestamp in milliseconds.""" + """Last update time of the object, as a Unix timestamp in milliseconds.""" latest_versions: Optional[List[ModelVersion]] = None """Array of model versions, each the latest version for its stage.""" @@ -3901,8 +4164,7 @@ class ModelDatabricks: """Name of the model.""" permission_level: Optional[PermissionLevel] = None - """Permission level of the requesting user on the object. 
For what is allowed at each level, see - [MLflow Model permissions](..).""" + """Permission level granted for the requesting user on this registered model""" tags: Optional[List[ModelTag]] = None """Array of tags associated with the model.""" @@ -4035,6 +4297,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelOutput: @dataclass class ModelTag: + """Tag for a registered model""" + key: Optional[str] = None """The tag key.""" @@ -4194,29 +4458,29 @@ class ModelVersionDatabricks: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - current_stage: Optional[Stage] = None - """Stage of the model version. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" + current_stage: Optional[str] = None description: Optional[str] = None """User-specified description for the object.""" + email_subscription_status: Optional[RegistryEmailSubscriptionType] = None + """Email Subscription Status: This is the subscription status of the user to the model version + Users get subscribed by interacting with the model version.""" + + feature_list: Optional[FeatureList] = None + """Feature lineage of `model_version`.""" + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" name: Optional[str] = None """Name of the model.""" + open_requests: Optional[List[Activity]] = None + """Open requests for this `model_versions`. Gap in sequence number is intentional and is done in + order to match field sequence numbers of `ModelVersion` proto message""" + permission_level: Optional[PermissionLevel] = None - """Permission level of the requesting user on the object. For what is allowed at each level, see - [MLflow Model permissions](..).""" run_id: Optional[str] = None """Unique identifier for the MLflow tracking run associated with the source model artifacts.""" @@ -4231,12 +4495,6 @@ class ModelVersionDatabricks: model version.""" status: Optional[Status] = None - """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register - a new model version is pending as server performs background tasks. - - * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
- - * `READY`: Model version is ready for use.""" status_message: Optional[str] = None """Details on the current status, for example why registration failed.""" @@ -4256,13 +4514,19 @@ def as_dict(self) -> dict: if self.creation_timestamp is not None: body["creation_timestamp"] = self.creation_timestamp if self.current_stage is not None: - body["current_stage"] = self.current_stage.value + body["current_stage"] = self.current_stage if self.description is not None: body["description"] = self.description + if self.email_subscription_status is not None: + body["email_subscription_status"] = self.email_subscription_status.value + if self.feature_list: + body["feature_list"] = self.feature_list.as_dict() if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp if self.name is not None: body["name"] = self.name + if self.open_requests: + body["open_requests"] = [v.as_dict() for v in self.open_requests] if self.permission_level is not None: body["permission_level"] = self.permission_level.value if self.run_id is not None: @@ -4292,10 +4556,16 @@ def as_shallow_dict(self) -> dict: body["current_stage"] = self.current_stage if self.description is not None: body["description"] = self.description + if self.email_subscription_status is not None: + body["email_subscription_status"] = self.email_subscription_status + if self.feature_list: + body["feature_list"] = self.feature_list if self.last_updated_timestamp is not None: body["last_updated_timestamp"] = self.last_updated_timestamp if self.name is not None: body["name"] = self.name + if self.open_requests: + body["open_requests"] = self.open_requests if self.permission_level is not None: body["permission_level"] = self.permission_level if self.run_id is not None: @@ -4321,10 +4591,13 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionDatabricks: """Deserializes the ModelVersionDatabricks from a dictionary.""" return cls( creation_timestamp=d.get("creation_timestamp", None), - current_stage=_enum(d, "current_stage", Stage), + current_stage=d.get("current_stage", None), description=d.get("description", None), + email_subscription_status=_enum(d, "email_subscription_status", RegistryEmailSubscriptionType), + feature_list=_from_dict(d, "feature_list", FeatureList), last_updated_timestamp=d.get("last_updated_timestamp", None), name=d.get("name", None), + open_requests=_repeated_dict(d, "open_requests", Activity), permission_level=_enum(d, "permission_level", PermissionLevel), run_id=d.get("run_id", None), run_link=d.get("run_link", None), @@ -4338,7 +4611,12 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionDatabricks: class ModelVersionStatus(Enum): - """Current status of `model_version`""" + """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register + a new model version is pending as server performs background tasks. + + * `FAILED_REGISTRATION`: Request to register a new model version has failed. + + * `READY`: Model version is ready for use.""" FAILED_REGISTRATION = "FAILED_REGISTRATION" PENDING_REGISTRATION = "PENDING_REGISTRATION" @@ -4384,7 +4662,7 @@ class OnlineStore: name: str """The name of the online store. This is the unique identifier for the online store.""" - capacity: Optional[str] = None + capacity: str """The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" creation_time: Optional[str] = None @@ -4486,6 +4764,7 @@ class PermissionLevel(Enum): """Permission level of the requesting user on the object. 
For what is allowed at each level, see [MLflow Model permissions](..).""" + CAN_CREATE_REGISTERED_MODEL = "CAN_CREATE_REGISTERED_MODEL" CAN_EDIT = "CAN_EDIT" CAN_MANAGE = "CAN_MANAGE" CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" @@ -4498,9 +4777,8 @@ class PublishSpec: online_store: str """The name of the target online store.""" - online_table_name: Optional[str] = None - """The full three-part (catalog, schema, table) name of the online table. Auto-generated if not - specified.""" + online_table_name: str + """The full three-part (catalog, schema, table) name of the online table.""" publish_mode: Optional[PublishSpecPublishMode] = None """The publish mode of the pipeline that syncs the online table with the source table. Defaults to @@ -4617,7 +4895,6 @@ class RegisteredModelAccessControlRequest: """name of the group""" permission_level: Optional[RegisteredModelPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -4728,7 +5005,6 @@ class RegisteredModelPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[RegisteredModelPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the RegisteredModelPermission into a dictionary suitable for use as a JSON request body.""" @@ -4817,7 +5093,6 @@ class RegisteredModelPermissionsDescription: description: Optional[str] = None permission_level: Optional[RegisteredModelPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the RegisteredModelPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -4880,6 +5155,18 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsRequest: ) +class RegistryEmailSubscriptionType(Enum): + """.. note:: Experimental: This entity may change or be removed in a future release without + warning. Email subscription types for registry notifications: - `ALL_EVENTS`: Subscribed to all + events. - `DEFAULT`: Default subscription type. - `SUBSCRIBED`: Subscribed to notifications. - + `UNSUBSCRIBED`: Not subscribed to notifications.""" + + ALL_EVENTS = "ALL_EVENTS" + DEFAULT = "DEFAULT" + SUBSCRIBED = "SUBSCRIBED" + UNSUBSCRIBED = "UNSUBSCRIBED" + + @dataclass class RegistryWebhook: creation_timestamp: Optional[int] = None @@ -4932,13 +5219,6 @@ class RegistryWebhook: """Name of the model whose events would trigger this webhook.""" status: Optional[RegistryWebhookStatus] = None - """Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. - - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event.""" def as_dict(self) -> dict: """Serializes the RegistryWebhook into a dictionary suitable for use as a JSON request body.""" @@ -5034,13 +5314,15 @@ class RegistryWebhookStatus(Enum): @dataclass class RejectTransitionRequest: + """Details required to identify and reject a model version stage transition request.""" + name: str """Name of the model.""" version: str """Version of the model.""" - stage: Stage + stage: str """Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. 
@@ -5062,7 +5344,7 @@ def as_dict(self) -> dict: if self.name is not None: body["name"] = self.name if self.stage is not None: - body["stage"] = self.stage.value + body["stage"] = self.stage if self.version is not None: body["version"] = self.version return body @@ -5086,7 +5368,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: return cls( comment=d.get("comment", None), name=d.get("name", None), - stage=_enum(d, "stage", Stage), + stage=d.get("stage", None), version=d.get("version", None), ) @@ -5094,7 +5376,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: @dataclass class RejectTransitionRequestResponse: activity: Optional[Activity] = None - """Activity recorded for the action.""" + """New activity generated as a result of this operation.""" def as_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" @@ -6382,23 +6664,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetTagResponse: return cls() -class Stage(Enum): - """Stage of the model version. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - ARCHIVED = "Archived" - NONE = "None" - PRODUCTION = "Production" - STAGING = "Staging" - - class Status(Enum): """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. @@ -6412,42 +6677,10 @@ class Status(Enum): READY = "READY" -@dataclass -class TestRegistryWebhook: - """Test webhook response object.""" - - body: Optional[str] = None - """Body of the response from the webhook URL""" - - status_code: Optional[int] = None - """Status code returned by the webhook URL""" - - def as_dict(self) -> dict: - """Serializes the TestRegistryWebhook into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes.""" - body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhook: - """Deserializes the TestRegistryWebhook from a dictionary.""" - return cls(body=d.get("body", None), status_code=d.get("status_code", None)) - - @dataclass class TestRegistryWebhookRequest: + """Details required to test a registry webhook.""" + id: str """Webhook ID""" @@ -6481,38 +6714,47 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookRequest: @dataclass class TestRegistryWebhookResponse: - webhook: Optional[TestRegistryWebhook] = None - """Test webhook response object.""" + body: Optional[str] = None + """Body of the response from the webhook URL""" + + status_code: Optional[int] = None + """Status code returned by the webhook URL""" def as_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.webhook: - body["webhook"] = self.webhook.as_dict() + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body def 
as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.webhook: - body["webhook"] = self.webhook + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: """Deserializes the TestRegistryWebhookResponse from a dictionary.""" - return cls(webhook=_from_dict(d, "webhook", TestRegistryWebhook)) + return cls(body=d.get("body", None), status_code=d.get("status_code", None)) @dataclass class TransitionModelVersionStageDatabricks: + """Details required to transition a model version's stage.""" + name: str """Name of the model.""" version: str """Version of the model.""" - stage: Stage + stage: str """Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -6539,7 +6781,7 @@ def as_dict(self) -> dict: if self.name is not None: body["name"] = self.name if self.stage is not None: - body["stage"] = self.stage.value + body["stage"] = self.stage if self.version is not None: body["version"] = self.version return body @@ -6566,25 +6808,26 @@ def from_dict(cls, d: Dict[str, Any]) -> TransitionModelVersionStageDatabricks: archive_existing_versions=d.get("archive_existing_versions", None), comment=d.get("comment", None), name=d.get("name", None), - stage=_enum(d, "stage", Stage), + stage=d.get("stage", None), version=d.get("version", None), ) @dataclass class TransitionRequest: - """Transition request details.""" + """For activities, this contains the activity recorded for the action. For comments, this contains + the comment details. For transition requests, this contains the transition request details.""" available_actions: Optional[List[ActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" comment: Optional[str] = None - """User-provided comment associated with the transition request.""" + """User-provided comment associated with the activity, comment, or transition request.""" creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - to_stage: Optional[Stage] = None + to_stage: Optional[str] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: * `None`: The initial stage of a model version. 
@@ -6608,7 +6851,7 @@ def as_dict(self) -> dict: if self.creation_timestamp is not None: body["creation_timestamp"] = self.creation_timestamp if self.to_stage is not None: - body["to_stage"] = self.to_stage.value + body["to_stage"] = self.to_stage if self.user_id is not None: body["user_id"] = self.user_id return body @@ -6635,37 +6878,40 @@ def from_dict(cls, d: Dict[str, Any]) -> TransitionRequest: available_actions=_repeated_enum(d, "available_actions", ActivityAction), comment=d.get("comment", None), creation_timestamp=d.get("creation_timestamp", None), - to_stage=_enum(d, "to_stage", Stage), + to_stage=d.get("to_stage", None), user_id=d.get("user_id", None), ) @dataclass class TransitionStageResponse: - model_version: Optional[ModelVersionDatabricks] = None + model_version_databricks: Optional[ModelVersionDatabricks] = None + """Updated model version""" def as_dict(self) -> dict: """Serializes the TransitionStageResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: - body["model_version"] = self.model_version.as_dict() + if self.model_version_databricks: + body["model_version_databricks"] = self.model_version_databricks.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: - body["model_version"] = self.model_version + if self.model_version_databricks: + body["model_version_databricks"] = self.model_version_databricks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: """Deserializes the TransitionStageResponse from a dictionary.""" - return cls(model_version=_from_dict(d, "model_version", ModelVersionDatabricks)) + return cls(model_version_databricks=_from_dict(d, "model_version_databricks", ModelVersionDatabricks)) @dataclass class UpdateComment: + """Details required to edit a comment on a model version.""" + id: str """Unique identifier of an activity""" @@ -6699,7 +6945,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateComment: @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None - """Comment details.""" + """Updated comment object""" def as_dict(self) -> dict: """Serializes the UpdateCommentResponse into a dictionary suitable for use as a JSON request body.""" @@ -6805,20 +7051,26 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelRequest: @dataclass class UpdateModelResponse: + registered_model: Optional[Model] = None + def as_dict(self) -> dict: """Serializes the UpdateModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.registered_model: + body["registered_model"] = self.registered_model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.registered_model: + body["registered_model"] = self.registered_model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelResponse: """Deserializes the UpdateModelResponse from a dictionary.""" - return cls() + return cls(registered_model=_from_dict(d, "registered_model", Model)) @dataclass @@ -6862,24 +7114,34 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: @dataclass class UpdateModelVersionResponse: + model_version: Optional[ModelVersion] = None + """Return new version number generated for this model in registry.""" + def as_dict(self) -> dict: """Serializes the 
UpdateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.model_version: + body["model_version"] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelVersionResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.model_version: + body["model_version"] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionResponse: """Deserializes the UpdateModelVersionResponse from a dictionary.""" - return cls() + return cls(model_version=_from_dict(d, "model_version", ModelVersion)) @dataclass class UpdateRegistryWebhook: + """Details required to update a registry webhook. Only the fields that need to be updated should be + specified, and both `http_url_spec` and `job_spec` should not be specified in the same request.""" + id: str """Webhook ID""" @@ -6921,13 +7183,6 @@ class UpdateRegistryWebhook: job_spec: Optional[JobSpec] = None status: Optional[RegistryWebhookStatus] = None - """Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. - - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event.""" def as_dict(self) -> dict: """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" @@ -7073,20 +7328,26 @@ class UpdateRunStatus(Enum): @dataclass class UpdateWebhookResponse: + webhook: Optional[RegistryWebhook] = None + def as_dict(self) -> dict: """Serializes the UpdateWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.webhook: + body["webhook"] = self.webhook.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWebhookResponse into a shallow dictionary of its immediate attributes.""" body = {} + if self.webhook: + body["webhook"] = self.webhook return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWebhookResponse: """Deserializes the UpdateWebhookResponse from a dictionary.""" - return cls() + return cls(webhook=_from_dict(d, "webhook", RegistryWebhook)) class ViewType(Enum): @@ -8380,7 +8641,7 @@ def create_online_store(self, online_store: OnlineStore) -> OnlineStore: """Create an Online Feature Store. :param online_store: :class:`OnlineStore` - An OnlineStore is a logical database instance that stores and serves features online. + Online store to create. :returns: :class:`OnlineStore` """ @@ -8484,7 +8745,7 @@ def update_online_store(self, name: str, online_store: OnlineStore, update_mask: :param name: str The name of the online store. This is the unique identifier for the online store. :param online_store: :class:`OnlineStore` - An OnlineStore is a logical database instance that stores and serves features online. + Online store to update. :param update_mask: str The list of fields to update. 
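As a usage note for the online-store changes above (capacity is now a required field on OnlineStore, and create_online_store/update_online_store take the store object directly): a minimal sketch, assuming the service is exposed on the workspace client as `w.feature_store` and using a hypothetical store name.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import OnlineStore

w = WorkspaceClient()

# Create an online store; "CU_1" is one of the documented capacity values
# ("CU_1", "CU_2", "CU_4", "CU_8").
store = w.feature_store.create_online_store(
    OnlineStore(name="demo-online-store", capacity="CU_1")
)

# Resize it later; update_mask lists the fields being changed.
store = w.feature_store.update_online_store(
    name="demo-online-store",
    online_store=OnlineStore(name="demo-online-store", capacity="CU_2"),
    update_mask="capacity",
)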
@@ -8728,6 +8989,179 @@ def get_experiment(self, experiment_id: str) -> ForecastingExperiment: return ForecastingExperiment.from_dict(res) +class MaterializedFeaturesAPI: + """Materialized Features are columns in tables and views that can be directly used as features to train and + serve ML models.""" + + def __init__(self, api_client): + self._api = api_client + + def create_feature_tag(self, table_name: str, feature_name: str, feature_tag: FeatureTag) -> FeatureTag: + """Creates a FeatureTag. + + :param table_name: str + :param feature_name: str + :param feature_tag: :class:`FeatureTag` + + :returns: :class:`FeatureTag` + """ + body = feature_tag.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/tags", + body=body, + headers=headers, + ) + return FeatureTag.from_dict(res) + + def delete_feature_tag(self, table_name: str, feature_name: str, key: str): + """Deletes a FeatureTag. + + :param table_name: str + The name of the feature table. + :param feature_name: str + The name of the feature within the feature table. + :param key: str + The key of the tag to delete. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/tags/{key}", + headers=headers, + ) + + def get_feature_lineage(self, table_name: str, feature_name: str) -> FeatureLineage: + """Get Feature Lineage. + + :param table_name: str + The full name of the feature table in Unity Catalog. + :param feature_name: str + The name of the feature. + + :returns: :class:`FeatureLineage` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/lineage", + headers=headers, + ) + return FeatureLineage.from_dict(res) + + def get_feature_tag(self, table_name: str, feature_name: str, key: str) -> FeatureTag: + """Gets a FeatureTag. + + :param table_name: str + :param feature_name: str + :param key: str + + :returns: :class:`FeatureTag` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/tags/{key}", + headers=headers, + ) + return FeatureTag.from_dict(res) + + def list_feature_tags( + self, table_name: str, feature_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[FeatureTag]: + """Lists FeatureTags. + + :param table_name: str + :param feature_name: str + :param page_size: int (optional) + The maximum number of results to return. + :param page_token: str (optional) + Pagination token to go to the next page based on a previous query. 
+ + :returns: Iterator over :class:`FeatureTag` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/tags", + query=query, + headers=headers, + ) + if "feature_tags" in json: + for v in json["feature_tags"]: + yield FeatureTag.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_feature_tag( + self, + table_name: str, + feature_name: str, + key: str, + feature_tag: FeatureTag, + *, + update_mask: Optional[str] = None, + ) -> FeatureTag: + """Updates a FeatureTag. + + :param table_name: str + :param feature_name: str + :param key: str + :param feature_tag: :class:`FeatureTag` + :param update_mask: str (optional) + The list of fields to update. + + :returns: :class:`FeatureTag` + """ + body = feature_tag.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/feature-store/feature-tables/{table_name}/features/{feature_name}/tags/{key}", + query=query, + body=body, + headers=headers, + ) + return FeatureTag.from_dict(res) + + class ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends using [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides @@ -8741,7 +9175,7 @@ def __init__(self, api_client): self._api = api_client def approve_transition_request( - self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None + self, name: str, version: str, stage: str, archive_existing_versions: bool, *, comment: Optional[str] = None ) -> ApproveTransitionRequestResponse: """Approves a model version stage transition request. @@ -8749,7 +9183,7 @@ def approve_transition_request( Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -8774,7 +9208,7 @@ def approve_transition_request( if name is not None: body["name"] = name if stage is not None: - body["stage"] = stage.value + body["stage"] = stage if version is not None: body["version"] = version headers = { @@ -8816,9 +9250,8 @@ def create_comment(self, name: str, version: str, comment: str) -> CreateComment def create_model( self, name: str, *, description: Optional[str] = None, tags: Optional[List[ModelTag]] = None ) -> CreateModelResponse: - """Creates a new registered model with the name specified in the request body. - - Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. + """Creates a new registered model with the name specified in the request body. Throws + `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. 
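To recap the MaterializedFeaturesAPI methods defined above, here is a minimal sketch of tagging a feature and reading its lineage. It assumes the service is reachable as `w.materialized_features` on the workspace client; the table and feature names are hypothetical.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import FeatureTag

w = WorkspaceClient()

table = "main.ml.user_features"   # hypothetical Unity Catalog table
feature = "avg_purchase_7d"       # hypothetical feature column

# Tag the feature, then page through all of its tags; list_feature_tags
# follows next_page_token internally and yields FeatureTag objects.
w.materialized_features.create_feature_tag(table, feature, FeatureTag(key="owner", value="ml-team"))
for tag in w.materialized_features.list_feature_tags(table, feature):
    print(tag.key, tag.value)

# Lineage lists the feature specs, Unity Catalog models and online features
# that consume this column.
lineage = w.materialized_features.get_feature_lineage(table, feature)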
:param name: str Register models under this name @@ -8895,7 +9328,7 @@ def create_model_version( return CreateModelVersionResponse.from_dict(res) def create_transition_request( - self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None + self, name: str, version: str, stage: str, *, comment: Optional[str] = None ) -> CreateTransitionRequestResponse: """Creates a model version stage transition request. @@ -8903,7 +9336,7 @@ def create_transition_request( Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -8924,7 +9357,7 @@ def create_transition_request( if name is not None: body["name"] = name if stage is not None: - body["stage"] = stage.value + body["stage"] = stage if version is not None: body["version"] = version headers = { @@ -8945,9 +9378,7 @@ def create_webhook( model_name: Optional[str] = None, status: Optional[RegistryWebhookStatus] = None, ) -> CreateWebhookResponse: - """**NOTE**: This endpoint is in Public Preview. - - Creates a registry webhook. + """**NOTE:** This endpoint is in Public Preview. Creates a registry webhook. :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was @@ -8981,7 +9412,9 @@ def create_webhook( :param description: str (optional) User-specified description for the webhook. :param http_url_spec: :class:`HttpUrlSpec` (optional) + External HTTPS URL called on event trigger (by using a POST request). :param job_spec: :class:`JobSpec` (optional) + ID of the job that the webhook runs. :param model_name: str (optional) If model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models. @@ -9126,21 +9559,15 @@ def delete_model_version_tag(self, name: str, version: str, key: str): self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete-tag", query=query, headers=headers) def delete_transition_request( - self, - name: str, - version: str, - stage: DeleteTransitionRequestStage, - creator: str, - *, - comment: Optional[str] = None, - ): + self, name: str, version: str, stage: str, creator: str, *, comment: Optional[str] = None + ) -> DeleteTransitionRequestResponse: """Cancels a model version stage transition request. :param name: str Name of the model. :param version: str Version of the model. - :param stage: :class:`DeleteTransitionRequestStage` + :param stage: str Target stage of the transition request. Valid values are: * `None`: The initial stage of a model version. @@ -9156,7 +9583,7 @@ def delete_transition_request( :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`DeleteTransitionRequestResponse` """ query = {} @@ -9167,21 +9594,20 @@ def delete_transition_request( if name is not None: query["name"] = name if stage is not None: - query["stage"] = stage.value + query["stage"] = stage if version is not None: query["version"] = version headers = { "Accept": "application/json", } - self._api.do("DELETE", "/api/2.0/mlflow/transition-requests/delete", query=query, headers=headers) + res = self._api.do("DELETE", "/api/2.0/mlflow/transition-requests/delete", query=query, headers=headers) + return DeleteTransitionRequestResponse.from_dict(res) - def delete_webhook(self, *, id: Optional[str] = None): - """**NOTE:** This endpoint is in Public Preview. 
+ def delete_webhook(self, id: str): + """**NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. - Deletes a registry webhook. - - :param id: str (optional) + :param id: str Webhook ID required to delete a registry webhook. @@ -9357,7 +9783,7 @@ def list_transition_requests(self, name: str, version: str) -> Iterator[Activity """Gets a list of all open stage transition requests for the model version. :param name: str - Name of the model. + Name of the registered model. :param version: str Version of the model. @@ -9381,19 +9807,48 @@ def list_webhooks( self, *, events: Optional[List[RegistryWebhookEvent]] = None, + max_results: Optional[int] = None, model_name: Optional[str] = None, page_token: Optional[str] = None, ) -> Iterator[RegistryWebhook]: - """**NOTE:** This endpoint is in Public Preview. - - Lists all registry webhooks. + """**NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. :param events: List[:class:`RegistryWebhookEvent`] (optional) + Events that trigger the webhook. * `MODEL_VERSION_CREATED`: A new model version was created for the + associated model. + + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. + + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. + + * `COMMENT_CREATED`: A user wrote a comment on a registered model. + + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be + specified for a registry-wide webhook, which can be created by not specifying a model name in the + create request. + + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. + + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. + + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. + + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. + + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to + staging. + + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to + production. + + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. + If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. + :param max_results: int (optional) :param model_name: str (optional) - If not specified, all webhooks associated with the specified events are listed, regardless of their - associated model. + Registered model name If not specified, all webhooks associated with the specified events are + listed, regardless of their associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch @@ -9403,6 +9858,8 @@ def list_webhooks( query = {} if events is not None: query["events"] = [v.value for v in events] + if max_results is not None: + query["max_results"] = max_results if model_name is not None: query["model_name"] = model_name if page_token is not None: @@ -9421,7 +9878,7 @@ def list_webhooks( query["page_token"] = json["next_page_token"] def reject_transition_request( - self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None + self, name: str, version: str, stage: str, *, comment: Optional[str] = None ) -> RejectTransitionRequestResponse: """Rejects a model version stage transition request. 
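Given the new `max_results` parameter and the expanded event documentation on `list_webhooks` above, a small sketch of filtering webhooks by event; the event value is taken from the list in the docstring, and the returned iterator transparently follows `next_page_token`.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import RegistryWebhookEvent

w = WorkspaceClient()

# List only webhooks that fire when a new model version is created,
# capping each page at 20 results.
for hook in w.model_registry.list_webhooks(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],
    max_results=20,
):
    print(hook.model_name, hook.status)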
@@ -9429,7 +9886,7 @@ def reject_transition_request( Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -9450,7 +9907,7 @@ def reject_transition_request( if name is not None: body["name"] = name if stage is not None: - body["stage"] = stage.value + body["stage"] = stage if version is not None: body["version"] = version headers = { @@ -9672,9 +10129,7 @@ def set_permissions( def test_registry_webhook( self, id: str, *, event: Optional[RegistryWebhookEvent] = None ) -> TestRegistryWebhookResponse: - """**NOTE:** This endpoint is in Public Preview. - - Tests a registry webhook. + """**NOTE:** This endpoint is in Public Preview. Tests a registry webhook. :param id: str Webhook ID @@ -9698,10 +10153,10 @@ def test_registry_webhook( return TestRegistryWebhookResponse.from_dict(res) def transition_stage( - self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None + self, name: str, version: str, stage: str, archive_existing_versions: bool, *, comment: Optional[str] = None ) -> TransitionStageResponse: """Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] - that also accepts a comment associated with the transition to be recorded.", + that also accepts a comment associated with the transition to be recorded. [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -9709,7 +10164,7 @@ def transition_stage( Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -9734,7 +10189,7 @@ def transition_stage( if name is not None: body["name"] = name if stage is not None: - body["stage"] = stage.value + body["stage"] = stage if version is not None: body["version"] = version headers = { @@ -9770,7 +10225,7 @@ def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: res = self._api.do("PATCH", "/api/2.0/mlflow/comments/update", body=body, headers=headers) return UpdateCommentResponse.from_dict(res) - def update_model(self, name: str, *, description: Optional[str] = None): + def update_model(self, name: str, *, description: Optional[str] = None) -> UpdateModelResponse: """Updates a registered model. :param name: str @@ -9778,7 +10233,7 @@ def update_model(self, name: str, *, description: Optional[str] = None): :param description: str (optional) If provided, updates the description for this `registered_model`. - + :returns: :class:`UpdateModelResponse` """ body = {} if description is not None: @@ -9790,9 +10245,12 @@ def update_model(self, name: str, *, description: Optional[str] = None): "Content-Type": "application/json", } - self._api.do("PATCH", "/api/2.0/mlflow/registered-models/update", body=body, headers=headers) + res = self._api.do("PATCH", "/api/2.0/mlflow/registered-models/update", body=body, headers=headers) + return UpdateModelResponse.from_dict(res) - def update_model_version(self, name: str, version: str, *, description: Optional[str] = None): + def update_model_version( + self, name: str, version: str, *, description: Optional[str] = None + ) -> UpdateModelVersionResponse: """Updates the model version. 
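A short sketch of the stage-transition call after these changes: `stage` is now a plain string (the `Stage` enum has been removed) and the response exposes the updated version under `model_version_databricks`. The model name and version below are hypothetical.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Valid stage strings are "None", "Staging", "Production" and "Archived".
resp = w.model_registry.transition_stage(
    name="demo-model",
    version="3",
    stage="Production",
    archive_existing_versions=True,
    comment="Promoting after offline evaluation",
)
print(resp.model_version_databricks.current_stage)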
:param name: str @@ -9802,7 +10260,7 @@ def update_model_version(self, name: str, version: str, *, description: Optional :param description: str (optional) If provided, updates the description for this `registered_model`. - + :returns: :class:`UpdateModelVersionResponse` """ body = {} if description is not None: @@ -9816,7 +10274,8 @@ def update_model_version(self, name: str, version: str, *, description: Optional "Content-Type": "application/json", } - self._api.do("PATCH", "/api/2.0/mlflow/model-versions/update", body=body, headers=headers) + res = self._api.do("PATCH", "/api/2.0/mlflow/model-versions/update", body=body, headers=headers) + return UpdateModelVersionResponse.from_dict(res) def update_permissions( self, @@ -9855,10 +10314,8 @@ def update_webhook( http_url_spec: Optional[HttpUrlSpec] = None, job_spec: Optional[JobSpec] = None, status: Optional[RegistryWebhookStatus] = None, - ): - """**NOTE:** This endpoint is in Public Preview. - - Updates a registry webhook. + ) -> UpdateWebhookResponse: + """**NOTE:** This endpoint is in Public Preview. Updates a registry webhook. :param id: str Webhook ID @@ -9896,15 +10353,8 @@ def update_webhook( :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) - Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. - - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real - event. - + :returns: :class:`UpdateWebhookResponse` """ body = {} if description is not None: @@ -9924,4 +10374,5 @@ def update_webhook( "Content-Type": "application/json", } - self._api.do("PATCH", "/api/2.0/mlflow/registry-webhooks/update", body=body, headers=headers) + res = self._api.do("PATCH", "/api/2.0/mlflow/registry-webhooks/update", body=body, headers=headers) + return UpdateWebhookResponse.from_dict(res) diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 174ee21a7..a1d36a80c 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -366,7 +366,6 @@ class FederationPolicy: the request URL.""" oidc_policy: Optional[OidcFederationPolicy] = None - """Specifies the policy to use for validating OIDC claims in your federated tokens.""" policy_id: Optional[str] = None """The ID of the federation policy.""" diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ca0a7604e..d848d4557 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -98,12 +98,6 @@ class CreatePipeline: pipeline execution.""" run_as: Optional[RunAs] = None - """Write-only setting, available only in Create/Update calls. Specifies the user or service - principal that the pipeline runs as. If not specified, the pipeline runs as the user who created - the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error - is thrown.""" schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" @@ -510,12 +504,6 @@ class EditPipeline: pipeline execution.""" run_as: Optional[RunAs] = None - """Write-only setting, available only in Create/Update calls. Specifies the user or service - principal that the pipeline runs as. 
If not specified, the pipeline runs as the user who created - the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error - is thrown.""" schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" @@ -922,6 +910,11 @@ class GetPipelineResponse: pipeline_id: Optional[str] = None """The ID of the pipeline.""" + run_as: Optional[RunAs] = None + """The user or service principal that the pipeline runs as, if specified in the request. This field + indicates the explicit configuration of `run_as` for the pipeline. To find the value in all + cases, explicit or implicit, use `run_as_user_name`.""" + run_as_user_name: Optional[str] = None """Username of the user that the pipeline will run on behalf of.""" @@ -952,6 +945,8 @@ def as_dict(self) -> dict: body["name"] = self.name if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id + if self.run_as: + body["run_as"] = self.run_as.as_dict() if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.spec: @@ -981,6 +976,8 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id + if self.run_as: + body["run_as"] = self.run_as if self.run_as_user_name is not None: body["run_as_user_name"] = self.run_as_user_name if self.spec: @@ -1002,6 +999,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse: latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), name=d.get("name", None), pipeline_id=d.get("pipeline_id", None), + run_as=_from_dict(d, "run_as", RunAs), run_as_user_name=d.get("run_as_user_name", None), spec=_from_dict(d, "spec", PipelineSpec), state=_enum(d, "state", PipelineState), @@ -1211,6 +1209,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: class IngestionSourceType(Enum): + BIGQUERY = "BIGQUERY" DYNAMICS365 = "DYNAMICS365" GA4_RAW_DATA = "GA4_RAW_DATA" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" @@ -1621,7 +1620,6 @@ class PipelineAccessControlRequest: """name of the group""" permission_level: Optional[PipelinePermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -2195,7 +2193,6 @@ class PipelinePermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[PipelinePermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body.""" @@ -2283,7 +2280,6 @@ class PipelinePermissionsDescription: description: Optional[str] = None permission_level: Optional[PipelinePermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -2630,7 +2626,6 @@ class PipelineStateInfo: owner.""" state: Optional[PipelineState] = None - """The pipeline state.""" def as_dict(self) -> dict: """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body.""" @@ -3101,7 +3096,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StackFrame: @dataclass class StartUpdate: cause: Optional[StartUpdateCause] = None - """What triggered this update.""" full_refresh: Optional[bool] = None """If true, this update will reset all tables before running.""" @@ -3378,6 +3372,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: class 
TableSpecificConfigScdType(Enum): """The SCD type to use to ingest the table.""" + APPEND_ONLY = "APPEND_ONLY" SCD_TYPE_1 = "SCD_TYPE_1" SCD_TYPE_2 = "SCD_TYPE_2" @@ -3528,7 +3523,6 @@ class UpdateStateInfo: creation_time: Optional[str] = None state: Optional[UpdateStateInfoState] = None - """The update state.""" update_id: Optional[str] = None @@ -3715,11 +3709,6 @@ def create( Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. :param run_as: :class:`RunAs` (optional) - Write-only setting, available only in Create/Update calls. Specifies the user or service principal - that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is - thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. :param serverless: bool (optional) @@ -4072,7 +4061,6 @@ def start_update( :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) - What triggered this update. :param full_refresh: bool (optional) If true, this update will reset all tables before running. :param full_refresh_selection: List[str] (optional) @@ -4223,11 +4211,6 @@ def update( Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. :param run_as: :class:`RunAs` (optional) - Write-only setting, available only in Create/Update calls. Specifies the user or service principal - that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is - thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. :param serverless: bool (optional) diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index bdae323fb..dce0a171d 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -133,7 +133,6 @@ class CloudResourceContainer: """The general workspace configurations that are specific to cloud providers.""" gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None - """The general workspace configurations that are specific to Google Cloud.""" def as_dict(self) -> dict: """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body.""" @@ -356,8 +355,6 @@ class CreateNetworkRequest: """The human-readable name of the network configuration.""" gcp_network_info: Optional[GcpNetworkInfo] = None - """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges).""" security_group_ids: Optional[List[str]] = None """IDs of one to five security groups associated with this network. Security group IDs **cannot** @@ -368,10 +365,6 @@ class CreateNetworkRequest: multiple network configurations.""" vpc_endpoints: Optional[NetworkVpcEndpoints] = None - """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - [AWS PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" vpc_id: Optional[str] = None """The ID of the VPC associated with this network. 
VPC IDs can be used in multiple network @@ -430,7 +423,6 @@ class CreateStorageConfigurationRequest: """The human-readable name of the storage configuration.""" root_bucket_info: RootBucketInfo - """Root S3 bucket information.""" def as_dict(self) -> dict: """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body.""" @@ -468,7 +460,6 @@ class CreateVpcEndpointRequest: """The ID of the VPC endpoint object in AWS.""" gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None - """The Google Cloud specific information for this Private Service Connect endpoint.""" region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" @@ -523,7 +514,6 @@ class CreateWorkspaceRequest: to `gcp`.""" cloud_resource_container: Optional[CloudResourceContainer] = None - """The general workspace configurations that are specific to cloud providers.""" credentials_id: Optional[str] = None """ID of the workspace's credential configuration object.""" @@ -559,27 +549,8 @@ class CreateWorkspaceRequest: with the pattern `dbc-xxxxxxxx-xxxx`.""" gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None - """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP - range configurations must be mutually exclusive. An attempt to create a workspace fails if - Databricks detects an IP range overlap. - - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and - all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" gke_config: Optional[GkeConfig] = None - """The configurations for the GKE cluster of a Databricks workspace.""" is_no_public_ip_enabled: Optional[bool] = None """Whether no public IP is enabled for the workspace.""" @@ -597,9 +568,6 @@ class CreateWorkspaceRequest: network_id: Optional[str] = None pricing_tier: Optional[PricingTier] = None - """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing""" private_access_settings_id: Optional[str] = None """ID of the workspace's private access settings object. Only used for PrivateLink. 
This ID must be @@ -1259,8 +1227,6 @@ class Network: """Array of error messages about the network configuration.""" gcp_network_info: Optional[GcpNetworkInfo] = None - """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges).""" network_id: Optional[str] = None """The Databricks network configuration ID.""" @@ -1273,18 +1239,12 @@ class Network: subnet_ids: Optional[List[str]] = None vpc_endpoints: Optional[NetworkVpcEndpoints] = None - """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - [AWS PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" vpc_id: Optional[str] = None """The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple networks.""" vpc_status: Optional[VpcStatus] = None - """The status of this network configuration object in terms of its use in a workspace: * - `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.""" warning_messages: Optional[List[NetworkWarning]] = None """Array of warning messages about the network configuration.""" @@ -1380,8 +1340,6 @@ class NetworkHealth: """Details of the error.""" error_type: Optional[ErrorType] = None - """The AWS resource associated with this error: credentials, VPC, subnet, security group, or - network ACL.""" def as_dict(self) -> dict: """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body.""" @@ -1451,7 +1409,6 @@ class NetworkWarning: """Details of the warning.""" warning_type: Optional[WarningType] = None - """The AWS resource associated with this warning: a subnet or a security group.""" def as_dict(self) -> dict: """Serializes the NetworkWarning into a dictionary suitable for use as a JSON request body.""" @@ -1510,11 +1467,6 @@ class PrivateAccessSettings: """An array of Databricks VPC endpoint IDs.""" private_access_level: Optional[PrivateAccessLevel] = None - """The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. * `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. For details, see `allowed_vpc_endpoint_ids`.""" private_access_settings_id: Optional[str] = None """Databricks private access settings ID.""" @@ -1637,7 +1589,6 @@ class StorageConfiguration: """Time in epoch milliseconds when the storage configuration was created.""" root_bucket_info: Optional[RootBucketInfo] = None - """Root S3 bucket information.""" storage_configuration_id: Optional[str] = None """Databricks storage configuration ID.""" @@ -1869,11 +1820,6 @@ class UpsertPrivateAccessSettingsRequest: [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html""" private_access_level: Optional[PrivateAccessLevel] = None - """The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. * `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. 
For details, see `allowed_vpc_endpoint_ids`.""" private_access_settings_id: Optional[str] = None """Databricks Account API private access settings ID.""" @@ -1951,7 +1897,6 @@ class VpcEndpoint: """The ID of the VPC endpoint object in AWS.""" gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None - """The Google Cloud specific information for this Private Service Connect endpoint.""" region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" @@ -1963,10 +1908,6 @@ class VpcEndpoint: [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html""" use_case: Optional[EndpointUseCase] = None - """This enumeration represents the type of Databricks VPC [endpoint service] that was used when - creating this VPC endpoint. - - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" vpc_endpoint_id: Optional[str] = None """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not @@ -2073,7 +2014,6 @@ class Workspace: """The cloud name. This field always has the value `gcp`.""" cloud_resource_container: Optional[CloudResourceContainer] = None - """The general workspace configurations that are specific to cloud providers.""" creation_time: Optional[int] = None """Time in epoch milliseconds when the workspace was created.""" @@ -2097,27 +2037,8 @@ class Workspace: workspace is not for a external customer, then external_customer_info is empty.""" gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None - """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP - range configurations must be mutually exclusive. An attempt to create a workspace fails if - Databricks detects an IP range overlap. - - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and - all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" gke_config: Optional[GkeConfig] = None - """The configurations for the GKE cluster of a Databricks workspace.""" is_no_public_ip_enabled: Optional[bool] = None """Whether no public IP is enabled for the workspace.""" @@ -2134,9 +2055,6 @@ class Workspace: the network is a customer-managed network.""" pricing_tier: Optional[PricingTier] = None - """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing""" private_access_settings_id: Optional[str] = None """ID of the workspace's private access settings object. Only used for PrivateLink. 
You must @@ -2161,8 +2079,6 @@ class Workspace: """The human-readable name of the workspace.""" workspace_status: Optional[WorkspaceStatus] = None - """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` - initially. Continue to check the status until the status is `RUNNING`.""" workspace_status_message: Optional[str] = None """Message describing the current workspace status.""" @@ -2570,8 +2486,6 @@ def create( :param network_name: str The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) - The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges). :param security_group_ids: List[str] (optional) IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations. @@ -2579,10 +2493,6 @@ def create( IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple network configurations. :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) - If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS - PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/ :param vpc_id: str (optional) The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations. @@ -2707,11 +2617,6 @@ def create( [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) - The private access level controls which VPC endpoints can connect to the UI or API of any workspace - that attaches this private access settings object. * `ACCOUNT` level access (the default) allows - only VPC endpoints that are registered in your Databricks account connect to your workspace. * - `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details, - see `allowed_vpc_endpoint_ids`. :param public_access_enabled: bool (optional) Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end @@ -2853,11 +2758,6 @@ def replace( [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) - The private access level controls which VPC endpoints can connect to the UI or API of any workspace - that attaches this private access settings object. * `ACCOUNT` level access (the default) allows - only VPC endpoints that are registered in your Databricks account connect to your workspace. * - `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details, - see `allowed_vpc_endpoint_ids`. :param public_access_enabled: bool (optional) Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end @@ -2912,7 +2812,6 @@ def create(self, storage_configuration_name: str, root_bucket_info: RootBucketIn :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` - Root S3 bucket information. 
:returns: :class:`StorageConfiguration` """ @@ -3018,7 +2917,6 @@ def create( :param aws_vpc_endpoint_id: str (optional) The ID of the VPC endpoint object in AWS. :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional) - The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. @@ -3189,7 +3087,6 @@ def create( The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to `gcp`. :param cloud_resource_container: :class:`CloudResourceContainer` (optional) - The general workspace configurations that are specific to cloud providers. :param credentials_id: str (optional) ID of the workspace's credential configuration object. :param custom_tags: Dict[str,str] (optional) @@ -3221,26 +3118,7 @@ def create( If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) - The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It - is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range - configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks - detects an IP range overlap. - - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP - addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) - The configurations for the GKE cluster of a Databricks workspace. :param is_no_public_ip_enabled: bool (optional) Whether no public IP is enabled for the workspace. :param location: str (optional) @@ -3251,9 +3129,6 @@ def create( history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) - The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing :param private_access_settings_id: str (optional) ID of the workspace's private access settings object. Only used for PrivateLink. 
This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index 8daed836d..a6fab7023 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -63,24 +63,6 @@ class AnomalyDetectionRunStatus(Enum): ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" -@dataclass -class DeleteQualityMonitorResponse: - def as_dict(self) -> dict: - """Serializes the DeleteQualityMonitorResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteQualityMonitorResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteQualityMonitorResponse: - """Deserializes the DeleteQualityMonitorResponse from a dictionary.""" - return cls() - - @dataclass class ListQualityMonitorResponse: next_page_token: Optional[str] = None diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index d0bd2dcdd..bc98f9ecf 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -306,8 +306,12 @@ class AiGatewayRateLimit: """Renewal period field for a rate limit. Currently, only 'minute' is supported.""" key: Optional[AiGatewayRateLimitKey] = None - """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' - being the default if not specified.""" + """Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' + are supported, with 'endpoint' being the default if not specified.""" + + principal: Optional[str] = None + """Principal field for a user, user group, or service principal to apply rate limiting to. 
Accepts + a user email, group name, or service principal application ID.""" def as_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body.""" @@ -316,6 +320,8 @@ def as_dict(self) -> dict: body["calls"] = self.calls if self.key is not None: body["key"] = self.key.value + if self.principal is not None: + body["principal"] = self.principal if self.renewal_period is not None: body["renewal_period"] = self.renewal_period.value return body @@ -327,6 +333,8 @@ def as_shallow_dict(self) -> dict: body["calls"] = self.calls if self.key is not None: body["key"] = self.key + if self.principal is not None: + body["principal"] = self.principal if self.renewal_period is not None: body["renewal_period"] = self.renewal_period return body @@ -337,6 +345,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AiGatewayRateLimit: return cls( calls=d.get("calls", None), key=_enum(d, "key", AiGatewayRateLimitKey), + principal=d.get("principal", None), renewal_period=_enum(d, "renewal_period", AiGatewayRateLimitRenewalPeriod), ) @@ -344,7 +353,9 @@ def from_dict(cls, d: Dict[str, Any]) -> AiGatewayRateLimit: class AiGatewayRateLimitKey(Enum): ENDPOINT = "endpoint" + SERVICE_PRINCIPAL = "service_principal" USER = "user" + USER_GROUP = "user_group" class AiGatewayRateLimitRenewalPeriod(Enum): @@ -919,6 +930,8 @@ class CreateServingEndpoint: config: Optional[EndpointCoreConfigInput] = None """The core config of the serving endpoint.""" + description: Optional[str] = None + rate_limits: Optional[List[RateLimit]] = None """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.""" @@ -938,6 +951,8 @@ def as_dict(self) -> dict: body["budget_policy_id"] = self.budget_policy_id if self.config: body["config"] = self.config.as_dict() + if self.description is not None: + body["description"] = self.description if self.name is not None: body["name"] = self.name if self.rate_limits: @@ -957,6 +972,8 @@ def as_shallow_dict(self) -> dict: body["budget_policy_id"] = self.budget_policy_id if self.config: body["config"] = self.config + if self.description is not None: + body["description"] = self.description if self.name is not None: body["name"] = self.name if self.rate_limits: @@ -974,6 +991,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateServingEndpoint: ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), budget_policy_id=d.get("budget_policy_id", None), config=_from_dict(d, "config", EndpointCoreConfigInput), + description=d.get("description", None), name=d.get("name", None), rate_limits=_repeated_dict(d, "rate_limits", RateLimit), route_optimized=d.get("route_optimized", None), @@ -2945,16 +2963,20 @@ class RateLimitRenewalPeriod(Enum): @dataclass class Route: - served_model_name: str - """The name of the served model this route configures traffic for.""" - traffic_percentage: int """The percentage of endpoint traffic to send to this route. 
It must be an integer between 0 and 100 inclusive.""" + served_entity_name: Optional[str] = None + + served_model_name: Optional[str] = None + """The name of the served model this route configures traffic for.""" + def as_dict(self) -> dict: """Serializes the Route into a dictionary suitable for use as a JSON request body.""" body = {} + if self.served_entity_name is not None: + body["served_entity_name"] = self.served_entity_name if self.served_model_name is not None: body["served_model_name"] = self.served_model_name if self.traffic_percentage is not None: @@ -2964,6 +2986,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the Route into a shallow dictionary of its immediate attributes.""" body = {} + if self.served_entity_name is not None: + body["served_entity_name"] = self.served_entity_name if self.served_model_name is not None: body["served_model_name"] = self.served_model_name if self.traffic_percentage is not None: @@ -2974,7 +2998,9 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Route: """Deserializes the Route from a dictionary.""" return cls( - served_model_name=d.get("served_model_name", None), traffic_percentage=d.get("traffic_percentage", None) + served_entity_name=d.get("served_entity_name", None), + served_model_name=d.get("served_model_name", None), + traffic_percentage=d.get("traffic_percentage", None), ) @@ -3164,8 +3190,6 @@ class ServedEntityOutput: external_model later. The task type of all external models within an endpoint must be the same.""" foundation_model: Optional[FoundationModel] = None - """All fields are not sensitive as they are hard-coded in the system and made available to - customers.""" instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" @@ -3331,8 +3355,6 @@ class ServedEntitySpec: external_model: Optional[ExternalModel] = None foundation_model: Optional[FoundationModel] = None - """All fields are not sensitive as they are hard-coded in the system and made available to - customers.""" name: Optional[str] = None @@ -3903,7 +3925,6 @@ class ServingEndpointAccessControlRequest: """name of the group""" permission_level: Optional[ServingEndpointPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -4179,7 +4200,6 @@ class ServingEndpointPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[ServingEndpointPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ServingEndpointPermission into a dictionary suitable for use as a JSON request body.""" @@ -4266,7 +4286,6 @@ class ServingEndpointPermissionsDescription: description: Optional[str] = None permission_level: Optional[ServingEndpointPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the ServingEndpointPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -4531,6 +4550,7 @@ def create( ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, + description: Optional[str] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None, @@ -4548,6 +4568,7 @@ def create( The budget policy to be applied to the serving endpoint. 
:param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. + :param description: str (optional) :param rate_limits: List[:class:`RateLimit`] (optional) Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. @@ -4567,6 +4588,8 @@ def create( body["budget_policy_id"] = budget_policy_id if config is not None: body["config"] = config.as_dict() + if description is not None: + body["description"] = description if name is not None: body["name"] = name if rate_limits is not None: @@ -4594,6 +4617,7 @@ def create_and_wait( ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, + description: Optional[str] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None, @@ -4603,6 +4627,7 @@ def create_and_wait( ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, + description=description, name=name, rate_limits=rate_limits, route_optimized=route_optimized, diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index b6463fddb..e8746a745 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -352,11 +352,6 @@ class ClusterAutoRestartMessage: enabled: Optional[bool] = None enablement_details: Optional[ClusterAutoRestartMessageEnablementDetails] = None - """Contains an information about the enablement status judging (e.g. whether the enterprise tier is - enabled) This is only additional information that MUST NOT be used to decide whether the setting - is enabled or not. This is intended to use only for purposes like showing an error message to - the customer with the additional details. For example, using these details we can check why - exactly the feature is disabled for this customer.""" maintenance_window: Optional[ClusterAutoRestartMessageMaintenanceWindow] = None @@ -615,7 +610,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: @dataclass class ComplianceSecurityProfileSetting: compliance_security_profile_workspace: ComplianceSecurityProfile - """SHIELD feature: CSP""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -745,10 +739,6 @@ class CreateIpAccessList: """Label for the IP access list. This **cannot** be empty.""" list_type: ListType - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list.""" ip_addresses: Optional[List[str]] = None @@ -789,7 +779,6 @@ class CreateIpAccessListResponse: """An IP access list was successfully created.""" ip_access_list: Optional[IpAccessListInfo] = None - """Definition of an IP Access list""" def as_dict(self) -> dict: """Serializes the CreateIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" @@ -1138,7 +1127,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CspEnablementAccount: @dataclass class CspEnablementAccountSetting: csp_enablement_account: CspEnablementAccount - """Account level policy for CSP""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -1745,42 +1733,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteLlmProxyPartnerPoweredWorkspaceRe return cls(etag=d.get("etag", None)) -@dataclass -class DeleteNetworkConnectivityConfigurationResponse: - def as_dict(self) -> dict: - """Serializes the DeleteNetworkConnectivityConfigurationResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteNetworkConnectivityConfigurationResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkConnectivityConfigurationResponse: - """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteNetworkPolicyRpcResponse: - def as_dict(self) -> dict: - """Serializes the DeleteNetworkPolicyRpcResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteNetworkPolicyRpcResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkPolicyRpcResponse: - """Deserializes the DeleteNetworkPolicyRpcResponse from a dictionary.""" - return cls() - - @dataclass class DeletePersonalComputeSettingResponse: """The etag is returned.""" @@ -2093,11 +2045,6 @@ class EgressNetworkPolicyInternetAccessPolicy: """Optional. If not specified, assume the policy is enforced for all workloads.""" restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None - """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: - Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can - only access explicitly allowed internet and storage destinations, as well as UC connections and - external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via - private link.""" def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body.""" @@ -2151,10 +2098,6 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestination: protocol: Optional[ EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol ] = None - """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP - filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be - set to TCP by default and hidden from the user. In the future, users may be able to select HTTP - filtering (i.e. SNI based filtering, filtering by FQDN).""" type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None @@ -2785,7 +2728,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EnhancedSecurityMonitoring: @dataclass class EnhancedSecurityMonitoringSetting: enhanced_security_monitoring_workspace: EnhancedSecurityMonitoring - """SHIELD feature: ESM""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -2864,7 +2806,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EsmEnablementAccount: @dataclass class EsmEnablementAccountSetting: esm_enablement_account: EsmEnablementAccount - """Account level policy for ESM""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used @@ -3049,7 +2990,6 @@ class FetchIpAccessListResponse: """An IP access list was successfully returned.""" ip_access_list: Optional[IpAccessListInfo] = None - """Definition of an IP Access list""" def as_dict(self) -> dict: """Serializes the FetchIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" @@ -3141,7 +3081,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenericWebhookConfig: @dataclass class GetIpAccessListResponse: ip_access_list: Optional[IpAccessListInfo] = None - """Definition of an IP Access list""" def as_dict(self) -> dict: """Serializes the GetIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" @@ -3265,10 +3204,6 @@ class IpAccessListInfo: """Universally unique identifier (UUID) of the IP access list.""" list_type: Optional[ListType] = None - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list.""" updated_at: Optional[int] = None """Update timestamp in milliseconds.""" @@ -4052,12 +3987,8 @@ class NccEgressDefaultRules: """Default rules don't have specific targets.""" aws_stable_ip_rule: Optional[NccAwsStableIpRule] = None - """The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to - allow traffic from your Databricks workspace.""" azure_service_endpoint_rule: Optional[NccAzureServiceEndpointRule] = None - """The stable Azure service endpoints. You can configure the firewall of your Azure resources to - allow traffic from your Databricks serverless compute resources.""" def as_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a dictionary suitable for use as a JSON request body.""" @@ -4533,11 +4464,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PartitionId: @dataclass class PersonalComputeMessage: value: PersonalComputeMessageEnum - """ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing - all users to create single-machine compute resources. DELEGATE: Moves access control for the - Personal Compute default policy to individual workspaces and requires a workspace’s users or - groups to be added to the ACLs of that workspace’s Personal Compute default policy before they - will be able to create compute resources through that policy.""" def as_dict(self) -> dict: """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body.""" @@ -4679,10 +4605,6 @@ class ReplaceIpAccessList: """Label for the IP access list. This **cannot** be empty.""" list_type: ListType - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" enabled: bool """Specifies whether this IP access list is enabled.""" @@ -5006,7 +4928,6 @@ class TokenAccessControlRequest: """name of the group""" permission_level: Optional[TokenPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -5208,7 +5129,6 @@ class TokenPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[TokenPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the TokenPermission into a dictionary suitable for use as a JSON request body.""" @@ -5293,7 +5213,6 @@ class TokenPermissionsDescription: description: Optional[str] = None permission_level: Optional[TokenPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the TokenPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -5729,13 +5648,6 @@ class UpdateDefaultNamespaceSettingRequest: """This should always be set to true for Settings API. Added for AIP compliance.""" setting: DefaultNamespaceSetting - """This represents the setting configuration for the default namespace in the Databricks workspace. - Setting the default catalog for the workspace determines the catalog that is used when queries - do not reference a fully qualified 3 level name. For example, if the default catalog is set to - 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - applies when using Unity Catalog-enabled compute.""" field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). @@ -6212,10 +6124,6 @@ class UpdateIpAccessList: """Label for the IP access list. This **cannot** be empty.""" list_type: Optional[ListType] = None - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list.""" def as_dict(self) -> dict: """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body.""" @@ -6766,10 +6674,6 @@ def create( :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) :returns: :class:`CreateIpAccessListResponse` @@ -6863,10 +6767,6 @@ def replace( :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. 
:param ip_addresses: List[str] (optional) @@ -6924,10 +6824,6 @@ def update( :param label: str (optional) Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. """ @@ -7657,13 +7553,6 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_ma :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` - This represents the setting configuration for the default namespace in the Databricks workspace. - Setting the default catalog for the workspace determines the catalog that is used when queries do - not reference a fully qualified 3 level name. For example, if the default catalog is set to - 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - applies when using Unity Catalog-enabled compute. :param field_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., @@ -8484,10 +8373,6 @@ def create( :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) :returns: :class:`CreateIpAccessListResponse` @@ -8576,10 +8461,6 @@ def replace( :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) @@ -8633,10 +8514,6 @@ def update( :param label: str (optional) Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. """ @@ -8933,7 +8810,6 @@ def create_network_connectivity_configuration( [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` - Properties of the new network connectivity configuration. 
:returns: :class:`NetworkConnectivityConfiguration` """ @@ -8964,8 +8840,6 @@ def create_private_endpoint_rule( :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule: :class:`CreatePrivateEndpointRule` - Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal - after initialization. :returns: :class:`NccPrivateEndpointRule` """ @@ -9157,8 +9031,6 @@ def update_private_endpoint_rule( :param private_endpoint_rule_id: str Your private endpoint rule ID. :param private_endpoint_rule: :class:`UpdatePrivateEndpointRule` - Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal - after initialization. :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., @@ -9203,6 +9075,7 @@ def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> Acc environment. :param network_policy: :class:`AccountNetworkPolicy` + Network policy configuration details. :returns: :class:`AccountNetworkPolicy` """ @@ -9287,6 +9160,7 @@ def update_network_policy_rpc( :param network_policy_id: str The unique identifier for the network policy. :param network_policy: :class:`AccountNetworkPolicy` + Updated network policy configuration details. :returns: :class:`AccountNetworkPolicy` """ @@ -10136,6 +10010,7 @@ def update_workspace_network_option_rpc( :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` + The network option details for the workspace. :returns: :class:`WorkspaceNetworkOption` """ diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index ca52c3a99..04ef7b94e 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -61,7 +61,6 @@ class CreateProvider: """The name of the Provider.""" authentication_type: AuthenticationType - """The delta sharing authentication type.""" comment: Optional[str] = None """Description about the provider.""" @@ -113,7 +112,6 @@ class CreateRecipient: """Name of Recipient.""" authentication_type: AuthenticationType - """The delta sharing authentication type.""" comment: Optional[str] = None """Description about the recipient.""" @@ -265,10 +263,8 @@ class DeltaSharingDependency: """Represents a UC dependency.""" function: Optional[DeltaSharingFunctionDependency] = None - """A Function in UC as a dependency.""" table: Optional[DeltaSharingTableDependency] = None - """A Table in UC as a dependency.""" def as_dict(self) -> dict: """Serializes the DeltaSharingDependency into a dictionary suitable for use as a JSON request body.""" @@ -1405,7 +1401,8 @@ class Privilege(Enum): @dataclass class PrivilegeAssignment: principal: Optional[str] = None - """The principal (user email address or group name).""" + """The principal (user email address or group name). For deleted principals, `principal` is empty + while `principal_id` is populated.""" privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" @@ -1437,7 +1434,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: @dataclass class ProviderInfo: authentication_type: Optional[AuthenticationType] = None - """The delta sharing authentication type.""" cloud: Optional[str] = None """Cloud vendor of the provider's UC metastore. 
This field is only present when the @@ -1607,7 +1603,6 @@ class RecipientInfo: retrieved.""" authentication_type: Optional[AuthenticationType] = None - """The delta sharing authentication type.""" cloud: Optional[str] = None """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the @@ -2968,7 +2963,6 @@ def create( :param name: str The name of the Provider. :param authentication_type: :class:`AuthenticationType` - The delta sharing authentication type. :param comment: str (optional) Description about the provider. :param recipient_profile_str: str (optional) @@ -3308,6 +3302,7 @@ def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPol :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being created. :param policy: :class:`FederationPolicy` + Name of the policy. This is the name of the policy to be created. :returns: :class:`FederationPolicy` """ @@ -3476,7 +3471,6 @@ def create( :param name: str Name of Recipient. :param authentication_type: :class:`AuthenticationType` - The delta sharing authentication type. :param comment: str (optional) Description about the recipient. :param data_recipient_global_metastore_id: str (optional) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index bbb3a2d90..d8b9e5089 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -665,7 +665,10 @@ class AlertV2: """Text of the query to be run.""" run_as_user_name: Optional[str] = None - """The run as username. This field is set to "Unavailable" if the user has been deleted.""" + """The run as username or application ID of service principal. This field is set to "Unavailable" + if the user has been deleted. On Create and Update, this field can be set to application ID of + an active service principal. Setting this field requires the servicePrincipal/user role. If not + specified it'll default to be request user.""" schedule: Optional[CronSchedule] = None @@ -1700,16 +1703,16 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequestQuery: class CreateQueryVisualizationsLegacyRequest: """Add visualization to a query""" + options: Any + """The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON.""" + query_id: str """The identifier returned by :method:queries/create""" type: str """The type of visualization: chart, table, pivot table, and so on.""" - options: Any - """The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON.""" - description: Optional[str] = None """A short description of this visualization. This is not displayed in the UI.""" @@ -1902,7 +1905,6 @@ class CreateWarehouseRequest: Supported values: - Must be unique within an org. - Must be less than 100 characters.""" spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -1911,8 +1913,6 @@ class CreateWarehouseRequest: Supported values: - Number of tags < 45.""" warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" def as_dict(self) -> dict: """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" @@ -2040,9 +2040,6 @@ class CreateWidget: width: int """Width of a widget""" - id: Optional[str] = None - """Widget ID returned by :method:dashboardwidgets/create""" - text: Optional[str] = None """If this is a textbox widget, the application displays this text. This field is ignored if the widget contains a visualization in the `visualization` field.""" @@ -2055,8 +2052,6 @@ def as_dict(self) -> dict: body = {} if self.dashboard_id is not None: body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id if self.options: body["options"] = self.options.as_dict() if self.text is not None: @@ -2072,8 +2067,6 @@ def as_shallow_dict(self) -> dict: body = {} if self.dashboard_id is not None: body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id if self.options: body["options"] = self.options if self.text is not None: @@ -2089,7 +2082,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateWidget: """Deserializes the CreateWidget from a dictionary.""" return cls( dashboard_id=d.get("dashboard_id", None), - id=d.get("id", None), options=_from_dict(d, "options", WidgetOptions), text=d.get("text", None), visualization_id=d.get("visualization_id", None), @@ -2865,7 +2857,6 @@ class EditWarehouseRequest: Supported values: - Must be unique within an org. - Must be less than 100 characters.""" spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -2874,8 +2865,6 @@ class EditWarehouseRequest: Supported values: - Number of tags < 45.""" warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" def as_dict(self) -> dict: """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body.""" @@ -3055,7 +3044,6 @@ class EndpointHealth: """Deprecated. 
split into summary and details for security""" status: Optional[Status] = None - """Health status of the warehouse.""" summary: Optional[str] = None """A short summary of the health status in case of degraded/failed warehouses.""" @@ -3178,10 +3166,8 @@ class EndpointInfo: """ODBC parameters for the SQL warehouse""" spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" state: Optional[State] = None - """State of the warehouse""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -3984,10 +3970,8 @@ class GetWarehouseResponse: """ODBC parameters for the SQL warehouse""" spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" state: Optional[State] = None - """State of the warehouse""" tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS @@ -3996,8 +3980,6 @@ class GetWarehouseResponse: Supported values: - Number of tags < 45.""" warehouse_type: Optional[GetWarehouseResponseWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" def as_dict(self) -> dict: """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body.""" @@ -4337,8 +4319,6 @@ def from_dict(cls, d: Dict[str, Any]) -> LegacyAlert: class LegacyAlertState(Enum): - """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" OK = "ok" TRIGGERED = "triggered" @@ -5318,7 +5298,6 @@ def from_dict(cls, d: Dict[str, Any]) -> OdbcParams: class OwnableObjectType(Enum): - """The singular form of the type of object which can be owned.""" ALERT = "alert" DASHBOARD = "dashboard" @@ -5403,7 +5382,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Parameter: class ParameterType(Enum): - """Parameters can have several different types.""" DATETIME = "datetime" ENUM = "enum" @@ -6673,7 +6651,6 @@ class ResultManifest: format: Optional[Format] = None schema: Optional[ResultSchema] = None - """The schema is an ordered list of column descriptions.""" total_byte_count: Optional[int] = None """The total number of bytes in the result set. This field is not available when using `INLINE` @@ -6779,8 +6756,6 @@ class RunAsMode(Enum): class RunAsRole(Enum): - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" OWNER = "owner" VIEWER = "viewer" @@ -7133,7 +7108,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StatementParameterListItem: @dataclass class StatementResponse: manifest: Optional[ResultManifest] = None - """The result manifest provides schema and metadata for the result set.""" result: Optional[ResultData] = None @@ -7142,7 +7116,6 @@ class StatementResponse: reference for all subsequent calls.""" status: Optional[StatementStatus] = None - """The status response includes execution state and if relevant, error information.""" def as_dict(self) -> dict: """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body.""" @@ -7203,11 +7176,6 @@ class StatementStatus: error: Optional[ServiceError] = None state: Optional[StatementState] = None - """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution - failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; - can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: - execution successful, and statement closed; result no longer available for fetch""" def as_dict(self) -> dict: """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body.""" @@ -7565,8 +7533,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipObjectId: @dataclass class TransferOwnershipRequest: - """Transfer object ownership""" - new_owner: Optional[str] = None """Email address for the new owner, who must exist in the workspace.""" @@ -8039,6 +8005,73 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequestVisualization ) +@dataclass +class UpdateWidgetRequest: + dashboard_id: str + """Dashboard ID returned by :method:dashboards/create.""" + + options: WidgetOptions + + width: int + """Width of a widget""" + + id: Optional[str] = None + """Widget ID returned by :method:dashboardwidgets/create""" + + text: Optional[str] = None + """If this is a textbox widget, the application displays this text. 
This field is ignored if the + widget contains a visualization in the `visualization` field.""" + + visualization_id: Optional[str] = None + """Query Vizualization ID returned by :method:queryvisualizations/create.""" + + def as_dict(self) -> dict: + """Serializes the UpdateWidgetRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options.as_dict() + if self.text is not None: + body["text"] = self.text + if self.visualization_id is not None: + body["visualization_id"] = self.visualization_id + if self.width is not None: + body["width"] = self.width + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWidgetRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options + if self.text is not None: + body["text"] = self.text + if self.visualization_id is not None: + body["visualization_id"] = self.visualization_id + if self.width is not None: + body["width"] = self.width + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateWidgetRequest: + """Deserializes the UpdateWidgetRequest from a dictionary.""" + return cls( + dashboard_id=d.get("dashboard_id", None), + id=d.get("id", None), + options=_from_dict(d, "options", WidgetOptions), + text=d.get("text", None), + visualization_id=d.get("visualization_id", None), + width=d.get("width", None), + ) + + @dataclass class User: email: Optional[str] = None @@ -8166,7 +8199,6 @@ class WarehouseAccessControlRequest: """name of the group""" permission_level: Optional[WarehousePermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -8277,7 +8309,6 @@ class WarehousePermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[WarehousePermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the WarehousePermission into a dictionary suitable for use as a JSON request body.""" @@ -8366,7 +8397,6 @@ class WarehousePermissionsDescription: description: Optional[str] = None permission_level: Optional[WarehousePermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the WarehousePermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -9092,7 +9122,7 @@ def create( text: Optional[str] = None, visualization_id: Optional[str] = None, ) -> Widget: - """Add widget to a dashboard + """Adds a widget to a dashboard :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. @@ -9127,7 +9157,7 @@ def create( return Widget.from_dict(res) def delete(self, id: str): - """Remove widget + """Removes a widget from a dashboard :param id: str Widget ID returned by :method:dashboardwidgets/create @@ -9151,7 +9181,7 @@ def update( text: Optional[str] = None, visualization_id: Optional[str] = None, ) -> Widget: - """Update existing widget + """Updates an existing widget :param id: str Widget ID returned by :method:dashboardwidgets/create @@ -9208,7 +9238,9 @@ def create( run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None, ) -> Dashboard: - """Create a dashboard object. 
+ """Creates a new dashboard object. Only the name parameter is required in the POST request JSON body. + Other fields can be included when duplicating dashboards with this API. Databricks does not recommend + designing dashboards exclusively using this API.', :param name: str The title of this dashboard that appears in list views and at the top of the dashboard page. @@ -9314,17 +9346,11 @@ def list( "Accept": "application/json", } - # deduplicate items that may have been added during iteration - seen = set() query["page"] = 1 while True: json = self._api.do("GET", "/api/2.0/preview/sql/dashboards", query=query, headers=headers) if "results" in json: for v in json["results"]: - i = v["id"] - if i in seen: - continue - seen.add(i) yield Dashboard.from_dict(v) if "results" not in json or not json["results"]: return @@ -9888,17 +9914,11 @@ def list( "Accept": "application/json", } - # deduplicate items that may have been added during iteration - seen = set() query["page"] = 1 while True: json = self._api.do("GET", "/api/2.0/preview/sql/queries", query=query, headers=headers) if "results" in json: for v in json["results"]: - i = v["id"] - if i in seen: - continue - seen.add(i) yield LegacyQuery.from_dict(v) if "results" not in json or not json["results"]: return @@ -10133,7 +10153,7 @@ def __init__(self, api_client): self._api = api_client def create( - self, query_id: str, type: str, options: Any, *, description: Optional[str] = None, name: Optional[str] = None + self, options: Any, query_id: str, type: str, *, description: Optional[str] = None, name: Optional[str] = None ) -> LegacyVisualization: """Creates visualization in the query. @@ -10142,13 +10162,13 @@ def create( [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param options: Any + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. :param query_id: str The identifier returned by :method:queries/create :param type: str The type of visualization: chart, table, pivot table, and so on. - :param options: Any - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. :param description: str (optional) A short description of this visualization. This is not displayed in the UI. :param name: str (optional) @@ -10184,7 +10204,7 @@ def delete(self, id: str): [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str - Widget ID returned by :method:queryvizualisations/create + Widget ID returned by :method:queryvisualizations/create """ @@ -10717,15 +10737,12 @@ def create( Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) - Configurations whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and - also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. 
@@ -10886,15 +10903,12 @@ def edit( Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) - Configurations whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and - also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index ea3af7760..a79a64db5 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -99,11 +99,6 @@ class CreateVectorIndexRequest: """Primary key of the index""" index_type: VectorIndexType - """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - with a source Delta Table, automatically and incrementally updating the index as the underlying - data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write - of vectors and metadata through our REST and SDK APIs. With this model, the user manages index - updates.""" delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None """Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.""" @@ -845,11 +840,6 @@ class MiniVectorIndex: """Name of the endpoint associated with the index""" index_type: Optional[VectorIndexType] = None - """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - with a source Delta Table, automatically and incrementally updating the index as the underlying - data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write - of vectors and metadata through our REST and SDK APIs. With this model, the user manages index - updates.""" name: Optional[str] = None """Name of the index""" @@ -1579,11 +1569,6 @@ class VectorIndex: """Name of the endpoint associated with the index""" index_type: Optional[VectorIndexType] = None - """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - with a source Delta Table, automatically and incrementally updating the index as the underlying - data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write - of vectors and metadata through our REST and SDK APIs. With this model, the user manages index - updates.""" name: Optional[str] = None """Name of the index""" @@ -1937,10 +1922,6 @@ def create_index( :param primary_key: str Primary key of the index :param index_type: :class:`VectorIndexType` - There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a - source Delta Table, automatically and incrementally updating the index as the underlying data in the - Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and - metadata through our REST and SDK APIs. With this model, the user manages index updates. :param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional) Specification for Delta Sync Index. 
Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional) diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index a7ecb15a9..c529ebcf8 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -1456,7 +1456,6 @@ class RepoAccessControlRequest: """name of the group""" permission_level: Optional[RepoPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -1644,7 +1643,6 @@ class RepoPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[RepoPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the RepoPermission into a dictionary suitable for use as a JSON request body.""" @@ -1732,7 +1730,6 @@ class RepoPermissionsDescription: description: Optional[str] = None permission_level: Optional[RepoPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the RepoPermissionsDescription into a dictionary suitable for use as a JSON request body.""" @@ -2089,7 +2086,6 @@ class WorkspaceObjectAccessControlRequest: """name of the group""" permission_level: Optional[WorkspaceObjectPermissionLevel] = None - """Permission level""" service_principal_name: Optional[str] = None """application ID of a service principal""" @@ -2200,7 +2196,6 @@ class WorkspaceObjectPermission: inherited_from_object: Optional[List[str]] = None permission_level: Optional[WorkspaceObjectPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermission into a dictionary suitable for use as a JSON request body.""" @@ -2288,7 +2283,6 @@ class WorkspaceObjectPermissionsDescription: description: Optional[str] = None permission_level: Optional[WorkspaceObjectPermissionLevel] = None - """Permission level""" def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsDescription into a dictionary suitable for use as a JSON request body.""" diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst index ec3e8ffb2..3c2cbd92e 100644 --- a/docs/account/billing/budget_policy.rst +++ b/docs/account/billing/budget_policy.rst @@ -69,7 +69,8 @@ :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. :param policy: :class:`BudgetPolicy` - Contains the BudgetPolicy details. + The policy to update. `creator_user_id` cannot be specified in the request. All other fields must be + specified even if not changed. The `policy_id` is used to identify the policy to update. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. 
This is redundant field as LimitConfig is part of the BudgetPolicy diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst index 41c4df845..ae0ea1f53 100644 --- a/docs/account/billing/log_delivery.rst +++ b/docs/account/billing/log_delivery.rst @@ -123,7 +123,6 @@ [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` - * Log Delivery Configuration :returns: :class:`WrappedLogDeliveryConfiguration` diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst index 54b24ebc2..d558cdcf5 100644 --- a/docs/account/provisioning/networks.rst +++ b/docs/account/provisioning/networks.rst @@ -33,8 +33,6 @@ :param network_name: str The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) - The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges). :param security_group_ids: List[str] (optional) IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations. @@ -42,10 +40,6 @@ IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple network configurations. :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) - If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS - PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/ :param vpc_id: str (optional) The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations. diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst index 414ee23fd..8b28bcecb 100644 --- a/docs/account/provisioning/private_access.rst +++ b/docs/account/provisioning/private_access.rst @@ -59,11 +59,6 @@ [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) - The private access level controls which VPC endpoints can connect to the UI or API of any workspace - that attaches this private access settings object. * `ACCOUNT` level access (the default) allows - only VPC endpoints that are registered in your Databricks account connect to your workspace. * - `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details, - see `allowed_vpc_endpoint_ids`. :param public_access_enabled: bool (optional) Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end @@ -211,11 +206,6 @@ [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) - The private access level controls which VPC endpoints can connect to the UI or API of any workspace - that attaches this private access settings object. * `ACCOUNT` level access (the default) allows - only VPC endpoints that are registered in your Databricks account connect to your workspace. * - `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details, - see `allowed_vpc_endpoint_ids`. 
:param public_access_enabled: bool (optional) Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index 8e89e714f..a72721a6d 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -45,7 +45,6 @@ :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` - Root S3 bucket information. :returns: :class:`StorageConfiguration` diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst index 0807679f7..b639f3e1b 100644 --- a/docs/account/provisioning/vpc_endpoints.rst +++ b/docs/account/provisioning/vpc_endpoints.rst @@ -47,7 +47,6 @@ :param aws_vpc_endpoint_id: str (optional) The ID of the VPC endpoint object in AWS. :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional) - The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 4566dd0cf..f217f57eb 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -67,7 +67,6 @@ The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to `gcp`. :param cloud_resource_container: :class:`CloudResourceContainer` (optional) - The general workspace configurations that are specific to cloud providers. :param credentials_id: str (optional) ID of the workspace's credential configuration object. :param custom_tags: Dict[str,str] (optional) @@ -99,26 +98,7 @@ If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) - The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It - is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range - configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks - detects an IP range overlap. - - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP - addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) - The configurations for the GKE cluster of a Databricks workspace. 
:param is_no_public_ip_enabled: bool (optional) Whether no public IP is enabled for the workspace. :param location: str (optional) @@ -129,9 +109,6 @@ history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) - The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing :param private_access_settings_id: str (optional) ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst index 7e4b65b2e..e59783116 100644 --- a/docs/account/settings/ip_access_lists.rst +++ b/docs/account/settings/ip_access_lists.rst @@ -42,10 +42,6 @@ :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) :returns: :class:`CreateIpAccessListResponse` @@ -96,10 +92,6 @@ :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) @@ -131,10 +123,6 @@ :param label: str (optional) Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. \ No newline at end of file diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst index edfb87fb7..90c885c17 100644 --- a/docs/account/settings/network_connectivity.rst +++ b/docs/account/settings/network_connectivity.rst @@ -28,7 +28,6 @@ [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` - Properties of the new network connectivity configuration. :returns: :class:`NetworkConnectivityConfiguration` @@ -48,8 +47,6 @@ :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule: :class:`CreatePrivateEndpointRule` - Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal - after initialization. :returns: :class:`NccPrivateEndpointRule` @@ -134,8 +131,6 @@ :param private_endpoint_rule_id: str Your private endpoint rule ID. 
:param private_endpoint_rule: :class:`UpdatePrivateEndpointRule` - Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal - after initialization. :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., diff --git a/docs/account/settings/network_policies.rst b/docs/account/settings/network_policies.rst index e6d9b5173..baecdc205 100644 --- a/docs/account/settings/network_policies.rst +++ b/docs/account/settings/network_policies.rst @@ -17,6 +17,7 @@ environment. :param network_policy: :class:`AccountNetworkPolicy` + Network policy configuration details. :returns: :class:`AccountNetworkPolicy` @@ -58,6 +59,7 @@ :param network_policy_id: str The unique identifier for the network policy. :param network_policy: :class:`AccountNetworkPolicy` + Updated network policy configuration details. :returns: :class:`AccountNetworkPolicy` \ No newline at end of file diff --git a/docs/account/settings/workspace_network_configuration.rst b/docs/account/settings/workspace_network_configuration.rst index 8e91bc291..307fe42cb 100644 --- a/docs/account/settings/workspace_network_configuration.rst +++ b/docs/account/settings/workspace_network_configuration.rst @@ -29,6 +29,7 @@ :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` + The network option details for the workspace. :returns: :class:`WorkspaceNetworkOption` \ No newline at end of file diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst index b04e12c38..eb914574b 100644 --- a/docs/dbdataclasses/aibuilder.rst +++ b/docs/dbdataclasses/aibuilder.rst @@ -8,10 +8,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelOptimizeResponse - :members: - :undoc-members: - .. autoclass:: CreateCustomLlmRequest :members: :undoc-members: @@ -24,10 +20,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteCustomLlmResponse - :members: - :undoc-members: - .. autoclass:: StartCustomLlmOptimizationRunRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index ca8408bdf..3c0c350e7 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -101,10 +101,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. py:class:: DeliveryStatus * The status string for log delivery. Possible values are: `CREATED`: There were no log delivery attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support if it doesn't go away soon. `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account. 
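The account network policy RPCs touched above (`create_network_policy_rpc` and `update_network_policy_rpc`) now document their `network_policy` parameter. A minimal sketch of calling them is shown below; it assumes the service is exposed as `a.network_policies` on an account-scoped client, that `AccountNetworkPolicy` can be constructed with defaults and imported from `databricks.sdk.service.settings`, and that the returned policy carries a `network_policy_id` attribute — none of these details are spelled out in this diff:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import AccountNetworkPolicy

a = AccountClient()

# Create a policy from a default-constructed AccountNetworkPolicy; populate its
# fields (e.g. egress rules) as required for your environment before creating.
created = a.network_policies.create_network_policy_rpc(
    network_policy=AccountNetworkPolicy()
)

# Later, send the (possibly modified) policy back together with its identifier.
updated = a.network_policies.update_network_policy_rpc(
    network_policy_id=created.network_policy_id,  # assumed attribute on the returned policy
    network_policy=created,
)
print(updated)
```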
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 9ebf9b05f..219b0a228 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -151,9 +151,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SYSTEM_CATALOG :value: "SYSTEM_CATALOG" - .. py:attribute:: UNKNOWN_CATALOG_TYPE - :value: "UNKNOWN_CATALOG_TYPE" - .. autoclass:: CloudflareApiToken :members: :undoc-members: @@ -166,6 +163,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ColumnRelationship + :members: + :undoc-members: + .. py:class:: ColumnTypeName .. py:attribute:: ARRAY @@ -240,13 +241,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VARIANT :value: "VARIANT" +.. autoclass:: ConnectionDependency + :members: + :undoc-members: + .. autoclass:: ConnectionInfo :members: :undoc-members: .. py:class:: ConnectionType - Next Id: 33 + Next Id: 36 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -389,6 +394,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateRequestExternalLineage + :members: + :undoc-members: + .. autoclass:: CreateResponse :members: :undoc-members: @@ -409,6 +418,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CredentialDependency + :members: + :undoc-members: + .. autoclass:: CredentialInfo :members: :undoc-members: @@ -423,7 +436,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CredentialType - Next Id: 12 + Next Id: 13 + + .. py:attribute:: ANY_STATIC_CREDENTIAL + :value: "ANY_STATIC_CREDENTIAL" .. py:attribute:: BEARER_TOKEN :value: "BEARER_TOKEN" @@ -481,27 +497,42 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DATABRICKS_FORMAT :value: "DATABRICKS_FORMAT" + .. py:attribute:: DATABRICKS_ROW_STORE_FORMAT + :value: "DATABRICKS_ROW_STORE_FORMAT" + .. py:attribute:: DELTA :value: "DELTA" .. py:attribute:: DELTASHARING :value: "DELTASHARING" - .. py:attribute:: HIVE_CUSTOM - :value: "HIVE_CUSTOM" + .. py:attribute:: DELTA_UNIFORM_HUDI + :value: "DELTA_UNIFORM_HUDI" + + .. py:attribute:: DELTA_UNIFORM_ICEBERG + :value: "DELTA_UNIFORM_ICEBERG" - .. py:attribute:: HIVE_SERDE - :value: "HIVE_SERDE" + .. py:attribute:: HIVE + :value: "HIVE" + + .. py:attribute:: ICEBERG + :value: "ICEBERG" .. py:attribute:: JSON :value: "JSON" + .. py:attribute:: MONGODB_FORMAT + :value: "MONGODB_FORMAT" + .. py:attribute:: MYSQL_FORMAT :value: "MYSQL_FORMAT" .. py:attribute:: NETSUITE_FORMAT :value: "NETSUITE_FORMAT" + .. py:attribute:: ORACLE_FORMAT + :value: "ORACLE_FORMAT" + .. py:attribute:: ORC :value: "ORC" @@ -514,6 +545,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REDSHIFT_FORMAT :value: "REDSHIFT_FORMAT" + .. py:attribute:: SALESFORCE_DATA_CLOUD_FORMAT + :value: "SALESFORCE_DATA_CLOUD_FORMAT" + .. py:attribute:: SALESFORCE_FORMAT :value: "SALESFORCE_FORMAT" @@ -526,6 +560,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SQLSERVER_FORMAT :value: "SQLSERVER_FORMAT" + .. py:attribute:: TERADATA_FORMAT + :value: "TERADATA_FORMAT" + .. 
py:attribute:: TEXT :value: "TEXT" @@ -558,6 +595,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteRequestExternalLineage + :members: + :undoc-members: + .. autoclass:: DeleteResponse :members: :undoc-members: @@ -635,10 +676,62 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ExternalLineageExternalMetadata + :members: + :undoc-members: + +.. autoclass:: ExternalLineageExternalMetadataInfo + :members: + :undoc-members: + +.. autoclass:: ExternalLineageFileInfo + :members: + :undoc-members: + +.. autoclass:: ExternalLineageInfo + :members: + :undoc-members: + +.. autoclass:: ExternalLineageModelVersion + :members: + :undoc-members: + +.. autoclass:: ExternalLineageModelVersionInfo + :members: + :undoc-members: + +.. autoclass:: ExternalLineageObject + :members: + :undoc-members: + +.. autoclass:: ExternalLineagePath + :members: + :undoc-members: + +.. autoclass:: ExternalLineageRelationship + :members: + :undoc-members: + +.. autoclass:: ExternalLineageRelationshipInfo + :members: + :undoc-members: + +.. autoclass:: ExternalLineageTable + :members: + :undoc-members: + +.. autoclass:: ExternalLineageTableInfo + :members: + :undoc-members: + .. autoclass:: ExternalLocationInfo :members: :undoc-members: +.. autoclass:: ExternalMetadata + :members: + :undoc-members: + .. autoclass:: FailedStatus :members: :undoc-members: @@ -777,6 +870,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ISOLATION_MODE_OPEN :value: "ISOLATION_MODE_OPEN" +.. py:class:: LineageDirection + + .. py:attribute:: DOWNSTREAM + :value: "DOWNSTREAM" + + .. py:attribute:: UPSTREAM + :value: "UPSTREAM" + .. autoclass:: ListAccountMetastoreAssignmentsResponse :members: :undoc-members: @@ -797,10 +898,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListExternalLineageRelationshipsResponse + :members: + :undoc-members: + .. autoclass:: ListExternalLocationsResponse :members: :undoc-members: +.. autoclass:: ListExternalMetadataResponse + :members: + :undoc-members: + .. autoclass:: ListFunctionsResponse :members: :undoc-members: @@ -1063,6 +1172,45 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: OptionSpec + :members: + :undoc-members: + +.. py:class:: OptionSpecOauthStage + + During the OAuth flow, specifies which stage the option should be displayed in the UI. OAUTH_STAGE_UNSPECIFIED is the default value for options unrelated to the OAuth flow. BEFORE_AUTHORIZATION_CODE corresponds to options necessary to initiate the OAuth process. BEFORE_ACCESS_TOKEN corresponds to options that are necessary to create a foreign connection, but that should be displayed after the authorization code has already been received. + + .. py:attribute:: BEFORE_ACCESS_TOKEN + :value: "BEFORE_ACCESS_TOKEN" + + .. py:attribute:: BEFORE_AUTHORIZATION_CODE + :value: "BEFORE_AUTHORIZATION_CODE" + +.. py:class:: OptionSpecOptionType + + Type of the option, we purposely follow JavaScript types so that the UI can map the options to JS types. https://www.w3schools.com/js/js_datatypes.asp Enum is a special case that it's just string with selections. + + .. py:attribute:: OPTION_BIGINT + :value: "OPTION_BIGINT" + + .. py:attribute:: OPTION_BOOLEAN + :value: "OPTION_BOOLEAN" + + .. 
py:attribute:: OPTION_ENUM + :value: "OPTION_ENUM" + + .. py:attribute:: OPTION_MULTILINE_STRING + :value: "OPTION_MULTILINE_STRING" + + .. py:attribute:: OPTION_NUMBER + :value: "OPTION_NUMBER" + + .. py:attribute:: OPTION_SERVICE_CREDENTIAL + :value: "OPTION_SERVICE_CREDENTIAL" + + .. py:attribute:: OPTION_STRING + :value: "OPTION_STRING" + .. autoclass:: PermissionsChange :members: :undoc-members: @@ -1284,6 +1432,194 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SecurableKind + + Latest kind: TABLE_DELTA_ICEBERG_DELTASHARING = 252; Next id:253 + + .. py:attribute:: TABLE_DB_STORAGE + :value: "TABLE_DB_STORAGE" + + .. py:attribute:: TABLE_DELTA + :value: "TABLE_DELTA" + + .. py:attribute:: TABLE_DELTASHARING + :value: "TABLE_DELTASHARING" + + .. py:attribute:: TABLE_DELTASHARING_MUTABLE + :value: "TABLE_DELTASHARING_MUTABLE" + + .. py:attribute:: TABLE_DELTA_EXTERNAL + :value: "TABLE_DELTA_EXTERNAL" + + .. py:attribute:: TABLE_DELTA_ICEBERG_DELTASHARING + :value: "TABLE_DELTA_ICEBERG_DELTASHARING" + + .. py:attribute:: TABLE_DELTA_ICEBERG_MANAGED + :value: "TABLE_DELTA_ICEBERG_MANAGED" + + .. py:attribute:: TABLE_DELTA_UNIFORM_HUDI_EXTERNAL + :value: "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL" + + .. py:attribute:: TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL + :value: "TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL" + + .. py:attribute:: TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL + :value: "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL" + + .. py:attribute:: TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED + :value: "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED" + + .. py:attribute:: TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE + :value: "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE" + + .. py:attribute:: TABLE_EXTERNAL + :value: "TABLE_EXTERNAL" + + .. py:attribute:: TABLE_FEATURE_STORE + :value: "TABLE_FEATURE_STORE" + + .. py:attribute:: TABLE_FEATURE_STORE_EXTERNAL + :value: "TABLE_FEATURE_STORE_EXTERNAL" + + .. py:attribute:: TABLE_FOREIGN_BIGQUERY + :value: "TABLE_FOREIGN_BIGQUERY" + + .. py:attribute:: TABLE_FOREIGN_DATABRICKS + :value: "TABLE_FOREIGN_DATABRICKS" + + .. py:attribute:: TABLE_FOREIGN_DELTASHARING + :value: "TABLE_FOREIGN_DELTASHARING" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE + :value: "TABLE_FOREIGN_HIVE_METASTORE" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL + :value: "TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED + :value: "TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL + :value: "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED + :value: "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW + :value: "TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL + :value: "TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_MANAGED + :value: "TABLE_FOREIGN_HIVE_METASTORE_MANAGED" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL + :value: "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL" + + .. py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED + :value: "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED" + + .. 
py:attribute:: TABLE_FOREIGN_HIVE_METASTORE_VIEW + :value: "TABLE_FOREIGN_HIVE_METASTORE_VIEW" + + .. py:attribute:: TABLE_FOREIGN_MONGODB + :value: "TABLE_FOREIGN_MONGODB" + + .. py:attribute:: TABLE_FOREIGN_MYSQL + :value: "TABLE_FOREIGN_MYSQL" + + .. py:attribute:: TABLE_FOREIGN_NETSUITE + :value: "TABLE_FOREIGN_NETSUITE" + + .. py:attribute:: TABLE_FOREIGN_ORACLE + :value: "TABLE_FOREIGN_ORACLE" + + .. py:attribute:: TABLE_FOREIGN_POSTGRESQL + :value: "TABLE_FOREIGN_POSTGRESQL" + + .. py:attribute:: TABLE_FOREIGN_REDSHIFT + :value: "TABLE_FOREIGN_REDSHIFT" + + .. py:attribute:: TABLE_FOREIGN_SALESFORCE + :value: "TABLE_FOREIGN_SALESFORCE" + + .. py:attribute:: TABLE_FOREIGN_SALESFORCE_DATA_CLOUD + :value: "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD" + + .. py:attribute:: TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING + :value: "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING" + + .. py:attribute:: TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW + :value: "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW" + + .. py:attribute:: TABLE_FOREIGN_SNOWFLAKE + :value: "TABLE_FOREIGN_SNOWFLAKE" + + .. py:attribute:: TABLE_FOREIGN_SQLDW + :value: "TABLE_FOREIGN_SQLDW" + + .. py:attribute:: TABLE_FOREIGN_SQLSERVER + :value: "TABLE_FOREIGN_SQLSERVER" + + .. py:attribute:: TABLE_FOREIGN_TERADATA + :value: "TABLE_FOREIGN_TERADATA" + + .. py:attribute:: TABLE_FOREIGN_WORKDAY_RAAS + :value: "TABLE_FOREIGN_WORKDAY_RAAS" + + .. py:attribute:: TABLE_ICEBERG_UNIFORM_MANAGED + :value: "TABLE_ICEBERG_UNIFORM_MANAGED" + + .. py:attribute:: TABLE_INTERNAL + :value: "TABLE_INTERNAL" + + .. py:attribute:: TABLE_MANAGED_POSTGRESQL + :value: "TABLE_MANAGED_POSTGRESQL" + + .. py:attribute:: TABLE_MATERIALIZED_VIEW + :value: "TABLE_MATERIALIZED_VIEW" + + .. py:attribute:: TABLE_MATERIALIZED_VIEW_DELTASHARING + :value: "TABLE_MATERIALIZED_VIEW_DELTASHARING" + + .. py:attribute:: TABLE_METRIC_VIEW + :value: "TABLE_METRIC_VIEW" + + .. py:attribute:: TABLE_ONLINE_VECTOR_INDEX_DIRECT + :value: "TABLE_ONLINE_VECTOR_INDEX_DIRECT" + + .. py:attribute:: TABLE_ONLINE_VECTOR_INDEX_REPLICA + :value: "TABLE_ONLINE_VECTOR_INDEX_REPLICA" + + .. py:attribute:: TABLE_ONLINE_VIEW + :value: "TABLE_ONLINE_VIEW" + + .. py:attribute:: TABLE_STANDARD + :value: "TABLE_STANDARD" + + .. py:attribute:: TABLE_STREAMING_LIVE_TABLE + :value: "TABLE_STREAMING_LIVE_TABLE" + + .. py:attribute:: TABLE_STREAMING_LIVE_TABLE_DELTASHARING + :value: "TABLE_STREAMING_LIVE_TABLE_DELTASHARING" + + .. py:attribute:: TABLE_SYSTEM + :value: "TABLE_SYSTEM" + + .. py:attribute:: TABLE_SYSTEM_DELTASHARING + :value: "TABLE_SYSTEM_DELTASHARING" + + .. py:attribute:: TABLE_VIEW + :value: "TABLE_VIEW" + + .. py:attribute:: TABLE_VIEW_DELTASHARING + :value: "TABLE_VIEW_DELTASHARING" + +.. autoclass:: SecurableKindManifest + :members: + :undoc-members: + .. py:class:: SecurableType The type of Unity Catalog securable. @@ -1336,9 +1672,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE :value: "TABLE" - .. py:attribute:: UNKNOWN_SECURABLE_TYPE - :value: "UNKNOWN_SECURABLE_TYPE" - .. py:attribute:: VOLUME :value: "VOLUME" @@ -1370,6 +1703,71 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SystemType + + .. py:attribute:: AMAZON_REDSHIFT + :value: "AMAZON_REDSHIFT" + + .. py:attribute:: AZURE_SYNAPSE + :value: "AZURE_SYNAPSE" + + .. py:attribute:: CONFLUENT + :value: "CONFLUENT" + + .. 
py:attribute:: GOOGLE_BIGQUERY + :value: "GOOGLE_BIGQUERY" + + .. py:attribute:: KAFKA + :value: "KAFKA" + + .. py:attribute:: LOOKER + :value: "LOOKER" + + .. py:attribute:: MICROSOFT_FABRIC + :value: "MICROSOFT_FABRIC" + + .. py:attribute:: MICROSOFT_SQL_SERVER + :value: "MICROSOFT_SQL_SERVER" + + .. py:attribute:: MONGODB + :value: "MONGODB" + + .. py:attribute:: MYSQL + :value: "MYSQL" + + .. py:attribute:: ORACLE + :value: "ORACLE" + + .. py:attribute:: OTHER + :value: "OTHER" + + .. py:attribute:: POSTGRESQL + :value: "POSTGRESQL" + + .. py:attribute:: POWER_BI + :value: "POWER_BI" + + .. py:attribute:: SALESFORCE + :value: "SALESFORCE" + + .. py:attribute:: SAP + :value: "SAP" + + .. py:attribute:: SERVICENOW + :value: "SERVICENOW" + + .. py:attribute:: SNOWFLAKE + :value: "SNOWFLAKE" + + .. py:attribute:: TABLEAU + :value: "TABLEAU" + + .. py:attribute:: TERADATA + :value: "TERADATA" + + .. py:attribute:: WORKDAY + :value: "WORKDAY" + .. autoclass:: TableConstraint :members: :undoc-members: @@ -1422,6 +1820,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MATERIALIZED_VIEW :value: "MATERIALIZED_VIEW" + .. py:attribute:: METRIC_VIEW + :value: "METRIC_VIEW" + .. py:attribute:: STREAMING_TABLE :value: "STREAMING_TABLE" @@ -1500,6 +1901,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateRequestExternalLineage + :members: + :undoc-members: + .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst index b07745b6f..812ac1eae 100644 --- a/docs/dbdataclasses/cleanrooms.rst +++ b/docs/dbdataclasses/cleanrooms.rst @@ -160,10 +160,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: - .. autoclass:: ListCleanRoomAssetsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index a283d6dd9..9562320c2 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -1550,6 +1550,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SECRET_RESOLUTION_ERROR :value: "SECRET_RESOLUTION_ERROR" + .. py:attribute:: SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION + :value: "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" + .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index c2ddc82f5..87d116dad 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -25,14 +25,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DASHBOARD_VIEW_BASIC :value: "DASHBOARD_VIEW_BASIC" -.. autoclass:: DeleteScheduleResponse - :members: - :undoc-members: - -.. autoclass:: DeleteSubscriptionResponse - :members: - :undoc-members: - .. autoclass:: GenieAttachment :members: :undoc-members: @@ -41,19 +33,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenieCreateConversationMessageRequest +.. autoclass:: GenieConversationSummary :members: :undoc-members: -.. autoclass:: GenieGenerateDownloadFullQueryResultResponse +.. 
autoclass:: GenieCreateConversationMessageRequest :members: :undoc-members: -.. autoclass:: GenieGetDownloadFullQueryResultResponse +.. autoclass:: GenieGetMessageQueryResultResponse :members: :undoc-members: -.. autoclass:: GenieGetMessageQueryResultResponse +.. autoclass:: GenieListConversationsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/database.rst b/docs/dbdataclasses/database.rst index 86340b5ef..008025d7d 100644 --- a/docs/dbdataclasses/database.rst +++ b/docs/dbdataclasses/database.rst @@ -16,6 +16,39 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DatabaseInstanceRef + :members: + :undoc-members: + +.. autoclass:: DatabaseInstanceRole + :members: + :undoc-members: + +.. autoclass:: DatabaseInstanceRoleAttributes + :members: + :undoc-members: + +.. py:class:: DatabaseInstanceRoleIdentityType + + .. py:attribute:: GROUP + :value: "GROUP" + + .. py:attribute:: PG_ONLY + :value: "PG_ONLY" + + .. py:attribute:: SERVICE_PRINCIPAL + :value: "SERVICE_PRINCIPAL" + + .. py:attribute:: USER + :value: "USER" + +.. py:class:: DatabaseInstanceRoleMembershipRole + + Roles that the DatabaseInstanceRole can be a member of. + + .. py:attribute:: DATABRICKS_SUPERUSER + :value: "DATABRICKS_SUPERUSER" + .. py:class:: DatabaseInstanceState .. py:attribute:: AVAILABLE @@ -40,23 +73,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteDatabaseCatalogResponse +.. autoclass:: DeltaTableSyncInfo :members: :undoc-members: -.. autoclass:: DeleteDatabaseInstanceResponse - :members: - :undoc-members: - -.. autoclass:: DeleteDatabaseTableResponse +.. autoclass:: GenerateDatabaseCredentialRequest :members: :undoc-members: -.. autoclass:: DeleteSyncedDatabaseTableResponse - :members: - :undoc-members: - -.. autoclass:: GenerateDatabaseCredentialRequest +.. autoclass:: ListDatabaseInstanceRolesResponse :members: :undoc-members: @@ -88,6 +113,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: UPDATING :value: "UPDATING" +.. autoclass:: RequestedClaims + :members: + :undoc-members: + +.. py:class:: RequestedClaimsPermissionSet + + Might add WRITE in the future + + .. py:attribute:: READ_ONLY + :value: "READ_ONLY" + +.. autoclass:: RequestedResource + :members: + :undoc-members: + .. autoclass:: SyncedDatabaseTable :members: :undoc-members: @@ -104,6 +144,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SyncedTablePosition + :members: + :undoc-members: + .. autoclass:: SyncedTableProvisioningStatus :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 75a9798db..a1db3ebcc 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -10,9 +10,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ActivityAction - An action that a user (with sufficient permissions) could take on an activity. Valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + An action that a user (with sufficient permissions) could take on an activity or comment. 
+ For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request * `REJECT_TRANSITION_REQUEST`: Reject a transition request * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + For comments, valid values are: * `EDIT_COMMENT`: Edit the comment + * `DELETE_COMMENT`: Delete the comment .. py:attribute:: APPROVE_TRANSITION_REQUEST :value: "APPROVE_TRANSITION_REQUEST" @@ -20,6 +23,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CANCEL_TRANSITION_REQUEST :value: "CANCEL_TRANSITION_REQUEST" + .. py:attribute:: DELETE_COMMENT + :value: "DELETE_COMMENT" + + .. py:attribute:: EDIT_COMMENT + :value: "EDIT_COMMENT" + .. py:attribute:: REJECT_TRANSITION_REQUEST :value: "REJECT_TRANSITION_REQUEST" @@ -63,15 +72,28 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CommentActivityAction - An action that a user (with sufficient permissions) could take on a comment. Valid values are: * `EDIT_COMMENT`: Edit the comment + An action that a user (with sufficient permissions) could take on an activity or comment. + For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + * `REJECT_TRANSITION_REQUEST`: Reject a transition request + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + For comments, valid values are: * `EDIT_COMMENT`: Edit the comment * `DELETE_COMMENT`: Delete the comment + .. py:attribute:: APPROVE_TRANSITION_REQUEST + :value: "APPROVE_TRANSITION_REQUEST" + + .. py:attribute:: CANCEL_TRANSITION_REQUEST + :value: "CANCEL_TRANSITION_REQUEST" + .. py:attribute:: DELETE_COMMENT :value: "DELETE_COMMENT" .. py:attribute:: EDIT_COMMENT :value: "EDIT_COMMENT" + .. py:attribute:: REJECT_TRANSITION_REQUEST + :value: "REJECT_TRANSITION_REQUEST" + .. autoclass:: CommentObject :members: :undoc-members: @@ -192,10 +214,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteOnlineStoreResponse - :members: - :undoc-members: - .. autoclass:: DeleteRun :members: :undoc-members: @@ -224,20 +242,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: DeleteTransitionRequestStage - - .. py:attribute:: ARCHIVED - :value: "ARCHIVED" - - .. py:attribute:: NONE - :value: "NONE" - - .. py:attribute:: PRODUCTION - :value: "PRODUCTION" - - .. py:attribute:: STAGING - :value: "STAGING" - .. autoclass:: DeleteWebhookResponse :members: :undoc-members: @@ -287,6 +291,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Feature + :members: + :undoc-members: + +.. autoclass:: FeatureLineage + :members: + :undoc-members: + +.. autoclass:: FeatureLineageFeatureSpec + :members: + :undoc-members: + +.. autoclass:: FeatureLineageModel + :members: + :undoc-members: + +.. autoclass:: FeatureLineageOnlineFeature + :members: + :undoc-members: + +.. autoclass:: FeatureList + :members: + :undoc-members: + +.. autoclass:: FeatureTag + :members: + :undoc-members: + .. autoclass:: FileInfo :members: :undoc-members: @@ -396,6 +428,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListFeatureTagsResponse + :members: + :undoc-members: + .. 
autoclass:: ListModelsResponse :members: :undoc-members: @@ -535,7 +571,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ModelVersionStatus - Current status of `model_version` + The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. + * `FAILED_REGISTRATION`: Request to register a new model version has failed. + * `READY`: Model version is ready for use. .. py:attribute:: FAILED_REGISTRATION :value: "FAILED_REGISTRATION" @@ -582,6 +620,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..). + .. py:attribute:: CAN_CREATE_REGISTERED_MODEL + :value: "CAN_CREATE_REGISTERED_MODEL" + .. py:attribute:: CAN_EDIT :value: "CAN_EDIT" @@ -660,6 +701,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RegistryEmailSubscriptionType + + .. note:: Experimental: This entity may change or be removed in a future release without warning. Email subscription types for registry notifications: - `ALL_EVENTS`: Subscribed to all events. - `DEFAULT`: Default subscription type. - `SUBSCRIBED`: Subscribed to notifications. - `UNSUBSCRIBED`: Not subscribed to notifications. + + .. py:attribute:: ALL_EVENTS + :value: "ALL_EVENTS" + + .. py:attribute:: DEFAULT + :value: "DEFAULT" + + .. py:attribute:: SUBSCRIBED + :value: "SUBSCRIBED" + + .. py:attribute:: UNSUBSCRIBED + :value: "UNSUBSCRIBED" + .. autoclass:: RegistryWebhook :members: :undoc-members: @@ -876,26 +933,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: Stage - - Stage of the model version. Valid values are: - * `None`: The initial stage of a model version. - * `Staging`: Staging or pre-production stage. - * `Production`: Production stage. - * `Archived`: Archived stage. - - .. py:attribute:: ARCHIVED - :value: "ARCHIVED" - - .. py:attribute:: NONE - :value: "NONE" - - .. py:attribute:: PRODUCTION - :value: "PRODUCTION" - - .. py:attribute:: STAGING - :value: "STAGING" - .. py:class:: Status The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. @@ -911,10 +948,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: READY :value: "READY" -.. autoclass:: TestRegistryWebhook - :members: - :undoc-members: - .. autoclass:: TestRegistryWebhookRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 44679fc41..5fa26e596 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -132,6 +132,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: IngestionSourceType + .. py:attribute:: BIGQUERY + :value: "BIGQUERY" + .. py:attribute:: DYNAMICS365 :value: "DYNAMICS365" @@ -420,6 +423,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo The SCD type to use to ingest the table. + .. py:attribute:: APPEND_ONLY + :value: "APPEND_ONLY" + .. 
py:attribute:: SCD_TYPE_1 :value: "SCD_TYPE_1" diff --git a/docs/dbdataclasses/qualitymonitorv2.rst b/docs/dbdataclasses/qualitymonitorv2.rst index fbe2746ce..9f4df6ee6 100644 --- a/docs/dbdataclasses/qualitymonitorv2.rst +++ b/docs/dbdataclasses/qualitymonitorv2.rst @@ -36,10 +36,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR :value: "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" -.. autoclass:: DeleteQualityMonitorResponse - :members: - :undoc-members: - .. autoclass:: ListQualityMonitorResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 01249dced..6c1bc106d 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -45,9 +45,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ENDPOINT :value: "ENDPOINT" + .. py:attribute:: SERVICE_PRINCIPAL + :value: "SERVICE_PRINCIPAL" + .. py:attribute:: USER :value: "USER" + .. py:attribute:: USER_GROUP + :value: "USER_GROUP" + .. py:class:: AiGatewayRateLimitRenewalPeriod .. py:attribute:: MINUTE diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 0f97314d2..f120095b6 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -277,14 +277,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteNetworkConnectivityConfigurationResponse - :members: - :undoc-members: - -.. autoclass:: DeleteNetworkPolicyRpcResponse - :members: - :undoc-members: - .. autoclass:: DeletePersonalComputeSettingResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 2c2578d90..22ec2a6ca 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -614,8 +614,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: LegacyAlertState - State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). - .. py:attribute:: OK :value: "OK" @@ -731,8 +729,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: OwnableObjectType - The singular form of the type of object which can be owned. - .. py:attribute:: ALERT :value: "ALERT" @@ -748,8 +744,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ParameterType - Parameters can have several different types. - .. py:attribute:: DATETIME :value: "DATETIME" @@ -969,8 +963,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunAsRole - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - .. py:attribute:: OWNER :value: "OWNER" @@ -1477,6 +1469,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateWidgetRequest + :members: + :undoc-members: + .. 
autoclass:: User :members: :undoc-members: diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst index 06172aa93..d3761b9db 100644 --- a/docs/workspace/apps/apps.rst +++ b/docs/workspace/apps/apps.rst @@ -40,6 +40,7 @@ :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` + The app deployment configuration. :returns: Long-running operation waiter for :class:`AppDeployment`. diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst index 9784e5787..93b49d3f4 100644 --- a/docs/workspace/catalog/credentials.rst +++ b/docs/workspace/catalog/credentials.rst @@ -66,9 +66,7 @@ :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) - The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) - The GCP cloud options to customize the requested temporary credential :returns: :class:`TemporaryCredentials` @@ -159,13 +157,10 @@ metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) - The Azure managed identity configuration. :param credential_name: str (optional) Required. The name of an existing credential or long-lived cloud credential to validate. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. :param external_location_name: str (optional) The name of an existing external location to validate. Only applicable for storage credentials (purpose is **STORAGE**.) diff --git a/docs/workspace/catalog/external_lineage.rst b/docs/workspace/catalog/external_lineage.rst new file mode 100644 index 000000000..e6369c41f --- /dev/null +++ b/docs/workspace/catalog/external_lineage.rst @@ -0,0 +1,67 @@ +``w.external_lineage``: External Lineage +======================================== +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: ExternalLineageAPI + + External Lineage APIs enable defining and managing lineage relationships between Databricks objects and + external systems. These APIs allow users to capture data flows connecting Databricks tables, models, and + file paths with external metadata objects. + + With these APIs, users can create, update, delete, and list lineage relationships with support for + column-level mappings and custom properties. + + .. py:method:: create_external_lineage_relationship(external_lineage_relationship: CreateRequestExternalLineage) -> ExternalLineageRelationship + + Creates an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`CreateRequestExternalLineage` + + :returns: :class:`ExternalLineageRelationship` + + + .. py:method:: delete_external_lineage_relationship(external_lineage_relationship: DeleteRequestExternalLineage) + + Deletes an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`DeleteRequestExternalLineage` + + + + + .. 
py:method:: list_external_lineage_relationships(object_info: ExternalLineageObject, lineage_direction: LineageDirection [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExternalLineageInfo] + + Lists external lineage relationships of a Databricks object or external metadata given a supplied + direction. + + :param object_info: :class:`ExternalLineageObject` + The object to query external lineage relationship on. + :param lineage_direction: :class:`LineageDirection` + The lineage direction to filter on. + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ExternalLineageInfo` + + + .. py:method:: update_external_lineage_relationship(external_lineage_relationship: UpdateRequestExternalLineage, update_mask: str) -> ExternalLineageRelationship + + Updates an external lineage relationship between a Databricks or external metadata object and another + external metadata object. + + :param external_lineage_relationship: :class:`UpdateRequestExternalLineage` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`ExternalLineageRelationship` + \ No newline at end of file diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index e12792487..624fe1958 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -60,15 +60,14 @@ :param comment: str (optional) User-provided free-form text description. :param enable_file_events: bool (optional) - [Create:OPT Update:OPT] Whether to enable file events on this external location. + Whether to enable file events on this external location. :param encryption_details: :class:`EncryptionDetails` (optional) - Encryption options that apply to clients connecting to cloud storage. :param fallback: bool (optional) Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - [Create:OPT Update:OPT] File event queue settings. + File event queue settings. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -213,15 +212,14 @@ :param credential_name: str (optional) Name of the storage credential used with this location. :param enable_file_events: bool (optional) - [Create:OPT Update:OPT] Whether to enable file events on this external location. + Whether to enable file events on this external location. :param encryption_details: :class:`EncryptionDetails` (optional) - Encryption options that apply to clients connecting to cloud storage. :param fallback: bool (optional) Indicates whether fallback mode is enabled for this external location. 
When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient. :param file_event_queue: :class:`FileEventQueue` (optional) - [Create:OPT Update:OPT] File event queue settings. + File event queue settings. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) diff --git a/docs/workspace/catalog/external_metadata.rst b/docs/workspace/catalog/external_metadata.rst new file mode 100644 index 000000000..79d3520aa --- /dev/null +++ b/docs/workspace/catalog/external_metadata.rst @@ -0,0 +1,80 @@ +``w.external_metadata``: External Metadata +========================================== +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: ExternalMetadataAPI + + External Metadata objects enable customers to register and manage metadata about external systems within + Unity Catalog. + + These APIs provide a standardized way to create, update, retrieve, list, and delete external metadata + objects. Fine-grained authorization ensures that only users with appropriate permissions can view and + manage external metadata objects. + + .. py:method:: create_external_metadata(external_metadata: ExternalMetadata) -> ExternalMetadata + + Creates a new external metadata object in the parent metastore if the caller is a metastore admin or + has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users upon creation + by default. + + :param external_metadata: :class:`ExternalMetadata` + + :returns: :class:`ExternalMetadata` + + + .. py:method:: delete_external_metadata(name: str) + + Deletes the external metadata object that matches the supplied name. The caller must be a metastore + admin, the owner of the external metadata object, or a user that has the **MANAGE** privilege. + + :param name: str + + + + + .. py:method:: get_external_metadata(name: str) -> ExternalMetadata + + Gets the specified external metadata object in a metastore. The caller must be a metastore admin, the + owner of the external metadata object, or a user that has the **BROWSE** privilege. + + :param name: str + + :returns: :class:`ExternalMetadata` + + + .. py:method:: list_external_metadata( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExternalMetadata] + + Gets an array of external metadata objects in the metastore. If the caller is the metastore admin, all + external metadata objects will be retrieved. Otherwise, only external metadata objects that the caller + has **BROWSE** on will be retrieved. There is no guarantee of a specific ordering of the elements in + the array. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ExternalMetadata` + + + .. py:method:: update_external_metadata(name: str, external_metadata: ExternalMetadata, update_mask: str) -> ExternalMetadata + + Updates the external metadata object that matches the supplied name. The caller can only update either + the owner or other metadata fields in one request. The caller must be a metastore admin, the owner of + the external metadata object, or a user that has the **MODIFY** privilege. If the caller is updating + the owner, they must also have the **MANAGE** privilege. + + :param name: str + Name of the external metadata object. 
+ :param external_metadata: :class:`ExternalMetadata` + :param update_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`ExternalMetadata` + \ No newline at end of file diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst index 471804098..1a84c4e74 100644 --- a/docs/workspace/catalog/index.rst +++ b/docs/workspace/catalog/index.rst @@ -11,7 +11,9 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas, catalogs connections credentials + external_lineage external_locations + external_metadata functions grants metastores diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst index 23768bcb5..4ed7e718f 100644 --- a/docs/workspace/catalog/online_tables.rst +++ b/docs/workspace/catalog/online_tables.rst @@ -11,7 +11,7 @@ Create a new Online Table. :param table: :class:`OnlineTable` - Online Table information. + Specification of the online table to be created. :returns: Long-running operation waiter for :class:`OnlineTable`. diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst index 96243192c..2813e6835 100644 --- a/docs/workspace/catalog/table_constraints.rst +++ b/docs/workspace/catalog/table_constraints.rst @@ -29,8 +29,6 @@ :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` - A table constraint, as defined by *one* of the following fields being set: - __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. :returns: :class:`TableConstraint` diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index fdb5164f0..efeea33f6 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -91,16 +91,16 @@ Full name of the table. :param include_browse: bool (optional) Whether to include tables in the response for which the principal can only access selective metadata - for + for. :param include_delta_metadata: bool (optional) Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :returns: :class:`TableInfo` - .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo] + .. 
py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo] Usage: @@ -135,11 +135,9 @@ Parent schema of tables. :param include_browse: bool (optional) Whether to include tables in the response for which the principal can only access selective metadata - for - :param include_delta_metadata: bool (optional) - Whether delta metadata should be included in the response. + for. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :param max_results: int (optional) Maximum number of tables to return. If not set, all the tables are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of this value and a server @@ -195,7 +193,7 @@ :param catalog_name: str Name of parent catalog for tables of interest. :param include_manifest_capabilities: bool (optional) - Whether to include a manifest containing capabilities the table has. + Whether to include a manifest containing table capabilities in the response. :param max_results: int (optional) Maximum number of summaries for tables to return. If not set, the page length is set to a server configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is @@ -222,6 +220,7 @@ :param full_name: str Full name of the table. :param owner: str (optional) + Username of current owner of table. \ No newline at end of file diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst index a3472518a..78d84fd57 100644 --- a/docs/workspace/catalog/volumes.rst +++ b/docs/workspace/catalog/volumes.rst @@ -81,11 +81,6 @@ :param name: str The name of the volume :param volume_type: :class:`VolumeType` - The type of the volume. An external volume is located in the specified external location. A managed - volume is located in the default location which is specified by the parent schema, or the parent - catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst index 86ab44e6d..c9e35af33 100644 --- a/docs/workspace/cleanrooms/clean_room_assets.rst +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -17,7 +17,6 @@ :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` - Metadata of the clean room asset :returns: :class:`CleanRoomAsset` @@ -80,7 +79,8 @@ For notebooks, the name is the notebook file name. :param asset: :class:`CleanRoomAsset` - Metadata of the clean room asset + The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset to + update. 
:returns: :class:`CleanRoomAsset` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst index 6a987175c..7288faf26 100644 --- a/docs/workspace/cleanrooms/clean_rooms.rst +++ b/docs/workspace/cleanrooms/clean_rooms.rst @@ -6,7 +6,7 @@ A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to - each other’s data. + each other's data. .. py:method:: create(clean_room: CleanRoom) -> CleanRoom diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index cf535e30d..d46b8ecd0 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -153,30 +153,6 @@ - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) - Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. * - `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias - for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple - users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: - A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. - Most programming languages, cluster features and data governance features are available in this - mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are - fully isolated so that they cannot see each other's data and credentials. Most data governance - features are supported in this mode. But programming languages and cluster features might be - limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency - clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on - standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC - nor passthrough enabled. :param docker_image: :class:`DockerImage` (optional) Custom docker image BYOC :param driver_instance_pool_id: str (optional) @@ -210,19 +186,6 @@ When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) - The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. 
* [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -281,7 +244,6 @@ `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) - Cluster Attributes showing for clusters workload types. :returns: Long-running operation waiter for :class:`ClusterDetails`. @@ -426,30 +388,6 @@ - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) - Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. * - `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias - for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple - users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: - A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. - Most programming languages, cluster features and data governance features are available in this - mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are - fully isolated so that they cannot see each other's data and credentials. Most data governance - features are supported in this mode. But programming languages and cluster features might be - limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency - clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on - standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC - nor passthrough enabled. :param docker_image: :class:`DockerImage` (optional) Custom docker image BYOC :param driver_instance_pool_id: str (optional) @@ -483,19 +421,6 @@ When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) - The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. 
* [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute @@ -554,7 +479,6 @@ `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) - Cluster Attributes showing for clusters workload types. :returns: Long-running operation waiter for :class:`ClusterDetails`. diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index 1f0221ed3..86e243930 100644 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -29,44 +29,22 @@ .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage - .. py:method:: execute_message_attachment_query(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse + .. py:method:: delete_conversation(space_id: str, conversation_id: str) - Execute the SQL for a message query attachment. Use this API when the query attachment has expired and - needs to be re-executed. + Delete a conversation. :param space_id: str - Genie space ID + The ID associated with the Genie space where the conversation is located. :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - :param attachment_id: str - Attachment ID - - :returns: :class:`GenieGetMessageQueryResultResponse` - + The ID of the conversation to delete. - .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse - - Execute the SQL query in the message. - - :param space_id: str - Genie space ID - :param conversation_id: str - Conversation ID - :param message_id: str - Message ID - :returns: :class:`GenieGetMessageQueryResultResponse` - .. py:method:: generate_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGenerateDownloadFullQueryResultResponse + .. py:method:: execute_message_attachment_query(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse - Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of - the download. The query result is stored in an external link and can be retrieved using the [Get - Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. - See [Execute Statement](:method:statementexecution/executestatement) for more details. + Execute the SQL for a message query attachment. Use this API when the query attachment has expired and + needs to be re-executed. 
:param space_id: str Genie space ID @@ -77,18 +55,12 @@ :param attachment_id: str Attachment ID - :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` + :returns: :class:`GenieGetMessageQueryResultResponse` - .. py:method:: get_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str) -> GenieGetDownloadFullQueryResultResponse + .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse - After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and - successfully receiving a `download_id`, use this API to poll the download progress. When the download - is complete, the API returns one or more external links to the query result files. Warning: Databricks - strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. - You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` - disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute - Statement](:method:statementexecution/executestatement) for more details. + Execute the SQL query in the message. :param space_id: str Genie space ID @@ -96,13 +68,8 @@ Conversation ID :param message_id: str Message ID - :param attachment_id: str - Attachment ID - :param download_id: str - Download ID. This ID is provided by the [Generate Download - endpoint](:method:genie/generateDownloadFullQueryResult) - :returns: :class:`GenieGetDownloadFullQueryResultResponse` + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage @@ -178,6 +145,20 @@ :returns: :class:`GenieSpace` + .. py:method:: list_conversations(space_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListConversationsResponse + + Get a list of conversations in a Genie Space. + + :param space_id: str + The ID of the Genie space to retrieve conversations from. + :param page_size: int (optional) + Maximum number of conversations to return per page + :param page_token: str (optional) + Token to get the next page of results + + :returns: :class:`GenieListConversationsResponse` + + .. py:method:: list_spaces( [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListSpacesResponse Get list of Genie Spaces. @@ -207,4 +188,14 @@ .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage + .. py:method:: trash_space(space_id: str) + + Move a Genie Space to the trash. + + :param space_id: str + The ID associated with the Genie space to be sent to the trash. + + + + .. py:method:: wait_get_message_genie_completed(conversation_id: str, message_id: str, space_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GenieMessage], None]]) -> GenieMessage diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index cfa87a8f3..e55aeedc8 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -23,6 +23,7 @@ :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` + The schedule to create. A dashboard is limited to 10 schedules. :returns: :class:`Schedule` @@ -36,6 +37,7 @@ :param schedule_id: str UUID identifying the schedule to which the subscription belongs. 
:param subscription: :class:`Subscription` + The subscription to create. A schedule is limited to 100 subscriptions. :returns: :class:`Subscription` @@ -240,6 +242,7 @@ :param schedule_id: str UUID identifying the schedule. :param schedule: :class:`Schedule` + The schedule to update. :returns: :class:`Schedule` \ No newline at end of file diff --git a/docs/workspace/database/database.rst b/docs/workspace/database/database.rst index d26728ed8..57c7b737f 100644 --- a/docs/workspace/database/database.rst +++ b/docs/workspace/database/database.rst @@ -20,17 +20,27 @@ Create a Database Instance. :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + Instance to create. :returns: :class:`DatabaseInstance` + .. py:method:: create_database_instance_role(instance_name: str, database_instance_role: DatabaseInstanceRole) -> DatabaseInstanceRole + + Create a role for a Database Instance. + + :param instance_name: str + :param database_instance_role: :class:`DatabaseInstanceRole` + + :returns: :class:`DatabaseInstanceRole` + + .. py:method:: create_database_table(table: DatabaseTable) -> DatabaseTable - Create a Database Table. + Create a Database Table. Useful for registering pre-existing PG tables in UC. See + CreateSyncedDatabaseTable for creating synced tables in PG from a source table in UC. :param table: :class:`DatabaseTable` - Next field marker: 13 :returns: :class:`DatabaseTable` @@ -40,7 +50,6 @@ Create a Synced Database Table. :param synced_table: :class:`SyncedDatabaseTable` - Next field marker: 12 :returns: :class:`SyncedDatabaseTable` @@ -74,6 +83,19 @@ + .. py:method:: delete_database_instance_role(instance_name: str, name: str [, allow_missing: Optional[bool], reassign_owned_to: Optional[str]]) + + Deletes a role for a Database Instance. + + :param instance_name: str + :param name: str + :param allow_missing: bool (optional) + This is the AIP standard name for the equivalent of Postgres' `IF EXISTS` option + :param reassign_owned_to: str (optional) + + + + .. py:method:: delete_database_table(name: str) Delete a Database Table. @@ -102,10 +124,13 @@ :returns: :class:`DatabaseInstance` - .. py:method:: generate_database_credential( [, instance_names: Optional[List[str]], request_id: Optional[str]]) -> DatabaseCredential + .. py:method:: generate_database_credential( [, claims: Optional[List[RequestedClaims]], instance_names: Optional[List[str]], request_id: Optional[str]]) -> DatabaseCredential Generates a credential that can be used to access database instances. + :param claims: List[:class:`RequestedClaims`] (optional) + The returned token will be scoped to the union of instance_names and instances containing the + specified UC tables, so instance_names is allowed to be empty. :param instance_names: List[str] (optional) Instances to which the token will be scoped. :param request_id: str (optional) @@ -132,6 +157,16 @@ :returns: :class:`DatabaseInstance` + .. py:method:: get_database_instance_role(instance_name: str, name: str) -> DatabaseInstanceRole + + Gets a role for a Database Instance. + + :param instance_name: str + :param name: str + + :returns: :class:`DatabaseInstanceRole` + + .. py:method:: get_database_table(name: str) -> DatabaseTable Get a Database Table. @@ -150,6 +185,19 @@ :returns: :class:`SyncedDatabaseTable` + .. 
py:method:: list_database_instance_roles(instance_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DatabaseInstanceRole] + + START OF PG ROLE APIs Section + + :param instance_name: str + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Instances. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseInstanceRole` + + .. py:method:: list_database_instances( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DatabaseInstance] List Database Instances. @@ -169,7 +217,6 @@ :param name: str The name of the instance. This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. :param update_mask: str The list of fields to update. diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 288ab0ad5..903cf3611 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -204,7 +204,6 @@ Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) - An optional set of health rules that can be defined for this job. :param job_clusters: List[:class:`JobCluster`] (optional) A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. @@ -234,10 +233,6 @@ :param queue: :class:`QueueSettings` (optional) The queue settings of the job. :param run_as: :class:`JobRunAs` (optional) - Write-only setting. Specifies the user or service principal that the job runs as. If not specified, - the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -1072,7 +1067,6 @@ Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) - An optional set of health rules that can be defined for this job. :param idempotency_token: str (optional) An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the diff --git a/docs/workspace/ml/feature_store.rst b/docs/workspace/ml/feature_store.rst index 6aa1f2398..c85fd5e59 100644 --- a/docs/workspace/ml/feature_store.rst +++ b/docs/workspace/ml/feature_store.rst @@ -16,7 +16,7 @@ Create an Online Feature Store. :param online_store: :class:`OnlineStore` - An OnlineStore is a logical database instance that stores and serves features online. + Online store to create. :returns: :class:`OnlineStore` @@ -72,7 +72,7 @@ :param name: str The name of the online store. This is the unique identifier for the online store. :param online_store: :class:`OnlineStore` - An OnlineStore is a logical database instance that stores and serves features online. + Online store to update. 
:param update_mask: str The list of fields to update. diff --git a/docs/workspace/ml/index.rst b/docs/workspace/ml/index.rst index 6e6338b70..44b2dbb0f 100644 --- a/docs/workspace/ml/index.rst +++ b/docs/workspace/ml/index.rst @@ -10,4 +10,5 @@ Create and manage experiments, features, and other machine learning artifacts experiments feature_store forecasting + materialized_features model_registry \ No newline at end of file diff --git a/docs/workspace/ml/materialized_features.rst b/docs/workspace/ml/materialized_features.rst new file mode 100644 index 000000000..03c4d53f8 --- /dev/null +++ b/docs/workspace/ml/materialized_features.rst @@ -0,0 +1,84 @@ +``w.materialized_features``: Materialized Features +================================================== +.. currentmodule:: databricks.sdk.service.ml + +.. py:class:: MaterializedFeaturesAPI + + Materialized Features are columns in tables and views that can be directly used as features to train and + serve ML models. + + .. py:method:: create_feature_tag(table_name: str, feature_name: str, feature_tag: FeatureTag) -> FeatureTag + + Creates a FeatureTag. + + :param table_name: str + :param feature_name: str + :param feature_tag: :class:`FeatureTag` + + :returns: :class:`FeatureTag` + + + .. py:method:: delete_feature_tag(table_name: str, feature_name: str, key: str) + + Deletes a FeatureTag. + + :param table_name: str + The name of the feature table. + :param feature_name: str + The name of the feature within the feature table. + :param key: str + The key of the tag to delete. + + + + + .. py:method:: get_feature_lineage(table_name: str, feature_name: str) -> FeatureLineage + + Get Feature Lineage. + + :param table_name: str + The full name of the feature table in Unity Catalog. + :param feature_name: str + The name of the feature. + + :returns: :class:`FeatureLineage` + + + .. py:method:: get_feature_tag(table_name: str, feature_name: str, key: str) -> FeatureTag + + Gets a FeatureTag. + + :param table_name: str + :param feature_name: str + :param key: str + + :returns: :class:`FeatureTag` + + + .. py:method:: list_feature_tags(table_name: str, feature_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FeatureTag] + + Lists FeatureTags. + + :param table_name: str + :param feature_name: str + :param page_size: int (optional) + The maximum number of results to return. + :param page_token: str (optional) + Pagination token to go to the next page based on a previous query. + + :returns: Iterator over :class:`FeatureTag` + + + .. py:method:: update_feature_tag(table_name: str, feature_name: str, key: str, feature_tag: FeatureTag [, update_mask: Optional[str]]) -> FeatureTag + + Updates a FeatureTag. + + :param table_name: str + :param feature_name: str + :param key: str + :param feature_tag: :class:`FeatureTag` + :param update_mask: str (optional) + The list of fields to update. + + :returns: :class:`FeatureTag` + \ No newline at end of file diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 79c1addbf..2d34256e4 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -12,7 +12,7 @@ The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models. - .. py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse + .. 
py:method:: approve_transition_request(name: str, version: str, stage: str, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse Approves a model version stage transition request. @@ -20,7 +20,7 @@ Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -92,9 +92,8 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - Creates a new registered model with the name specified in the request body. - - Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. + Creates a new registered model with the name specified in the request body. Throws + `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. :param name: str Register models under this name @@ -143,7 +142,7 @@ :returns: :class:`CreateModelVersionResponse` - .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse + .. py:method:: create_transition_request(name: str, version: str, stage: str [, comment: Optional[str]]) -> CreateTransitionRequestResponse Creates a model version stage transition request. @@ -151,7 +150,7 @@ Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -190,9 +189,7 @@ # cleanup w.model_registry.delete_webhook(id=created.webhook.id) - **NOTE**: This endpoint is in Public Preview. - - Creates a registry webhook. + **NOTE:** This endpoint is in Public Preview. Creates a registry webhook. :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was @@ -226,7 +223,9 @@ :param description: str (optional) User-specified description for the webhook. :param http_url_spec: :class:`HttpUrlSpec` (optional) + External HTTPS URL called on event trigger (by using a POST request). :param job_spec: :class:`JobSpec` (optional) + ID of the job that the webhook runs. :param model_name: str (optional) If model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models. @@ -302,7 +301,7 @@ - .. py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]]) + .. py:method:: delete_transition_request(name: str, version: str, stage: str, creator: str [, comment: Optional[str]]) -> DeleteTransitionRequestResponse Cancels a model version stage transition request. @@ -310,7 +309,7 @@ Name of the model. :param version: str Version of the model. - :param stage: :class:`DeleteTransitionRequestStage` + :param stage: str Target stage of the transition request. Valid values are: * `None`: The initial stage of a model version. @@ -326,16 +325,14 @@ :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`DeleteTransitionRequestResponse` - .. py:method:: delete_webhook( [, id: Optional[str]]) - - **NOTE:** This endpoint is in Public Preview. + .. py:method:: delete_webhook(id: str) - Deletes a registry webhook. + **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. - :param id: str (optional) + :param id: str Webhook ID required to delete a registry webhook. 
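A minimal usage sketch of the string-typed ``stage`` argument documented in the transition-request methods above; the model name, version, and comment are placeholders rather than values taken from this change:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # `stage` is now passed as a plain string ("None", "Staging",
    # "Production" or "Archived") rather than a Stage enum value.
    w.model_registry.create_transition_request(
        name="my-model",  # hypothetical registered model name
        version="1",
        stage="Staging",
        comment="ready for pre-production validation",
    )

    # Approving the request; archive_existing_versions=True archives any
    # versions already in the target stage.
    w.model_registry.approve_transition_request(
        name="my-model",
        version="1",
        stage="Staging",
        archive_existing_versions=True,
    )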
@@ -456,14 +453,14 @@ Gets a list of all open stage transition requests for the model version. :param name: str - Name of the model. + Name of the registered model. :param version: str Version of the model. :returns: Iterator over :class:`Activity` - .. py:method:: list_webhooks( [, events: Optional[List[RegistryWebhookEvent]], model_name: Optional[str], page_token: Optional[str]]) -> Iterator[RegistryWebhook] + .. py:method:: list_webhooks( [, events: Optional[List[RegistryWebhookEvent]], max_results: Optional[int], model_name: Optional[str], page_token: Optional[str]]) -> Iterator[RegistryWebhook] Usage: @@ -477,23 +474,51 @@ all = w.model_registry.list_webhooks(ml.ListWebhooksRequest()) - **NOTE:** This endpoint is in Public Preview. - - Lists all registry webhooks. + **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. :param events: List[:class:`RegistryWebhookEvent`] (optional) + Events that trigger the webhook. * `MODEL_VERSION_CREATED`: A new model version was created for the + associated model. + + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. + + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. + + * `COMMENT_CREATED`: A user wrote a comment on a registered model. + + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be + specified for a registry-wide webhook, which can be created by not specifying a model name in the + create request. + + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. + + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. + + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. + + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. + + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to + staging. + + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to + production. + + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. + If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. + :param max_results: int (optional) :param model_name: str (optional) - If not specified, all webhooks associated with the specified events are listed, regardless of their - associated model. + Registered model name If not specified, all webhooks associated with the specified events are + listed, regardless of their associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch :returns: Iterator over :class:`RegistryWebhook` - .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse + .. py:method:: reject_transition_request(name: str, version: str, stage: str [, comment: Optional[str]]) -> RejectTransitionRequestResponse Rejects a model version stage transition request. @@ -501,7 +526,7 @@ Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -618,9 +643,7 @@ .. 
py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse - **NOTE:** This endpoint is in Public Preview. - - Tests a registry webhook. + **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. :param id: str Webhook ID @@ -631,10 +654,10 @@ :returns: :class:`TestRegistryWebhookResponse` - .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse + .. py:method:: transition_stage(name: str, version: str, stage: str, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] - that also accepts a comment associated with the transition to be recorded.", + that also accepts a comment associated with the transition to be recorded. [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -642,7 +665,7 @@ Name of the model. :param version: str Version of the model. - :param stage: :class:`Stage` + :param stage: str Target stage of the transition. Valid values are: * `None`: The initial stage of a model version. @@ -698,7 +721,7 @@ :returns: :class:`UpdateCommentResponse` - .. py:method:: update_model(name: str [, description: Optional[str]]) + .. py:method:: update_model(name: str [, description: Optional[str]]) -> UpdateModelResponse Usage: @@ -728,10 +751,10 @@ :param description: str (optional) If provided, updates the description for this `registered_model`. - + :returns: :class:`UpdateModelResponse` - .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]]) + .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]]) -> UpdateModelVersionResponse Usage: @@ -763,7 +786,7 @@ :param description: str (optional) If provided, updates the description for this `registered_model`. - + :returns: :class:`UpdateModelVersionResponse` .. py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions @@ -778,7 +801,7 @@ :returns: :class:`RegisteredModelPermissions` - .. py:method:: update_webhook(id: str [, description: Optional[str], events: Optional[List[RegistryWebhookEvent]], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], status: Optional[RegistryWebhookStatus]]) + .. py:method:: update_webhook(id: str [, description: Optional[str], events: Optional[List[RegistryWebhookEvent]], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], status: Optional[RegistryWebhookStatus]]) -> UpdateWebhookResponse Usage: @@ -803,9 +826,7 @@ # cleanup w.model_registry.delete_webhook(id=created.webhook.id) - **NOTE:** This endpoint is in Public Preview. - - Updates a registry webhook. + **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. :param id: str Webhook ID @@ -843,13 +864,6 @@ :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) - Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. - - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real - event. 
- + :returns: :class:`UpdateWebhookResponse` \ No newline at end of file diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 5464eaa24..7e0ae7b5f 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -105,11 +105,6 @@ Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. :param run_as: :class:`RunAs` (optional) - Write-only setting, available only in Create/Update calls. Specifies the user or service principal - that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is - thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. :param serverless: bool (optional) @@ -353,7 +348,6 @@ :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) - What triggered this update. :param full_refresh: bool (optional) If true, this update will reset all tables before running. :param full_refresh_selection: List[str] (optional) @@ -495,11 +489,6 @@ Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution. :param run_as: :class:`RunAs` (optional) - Write-only setting, available only in Create/Update calls. Specifies the user or service principal - that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is - thrown. :param schema: str (optional) The default schema (database) where tables are read from or published to. :param serverless: bool (optional) diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index 404837e10..d29ca521e 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -27,7 +27,7 @@ :returns: :class:`BuildLogsResponse` - .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] + .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], description: Optional[str], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new serving endpoint. @@ -42,6 +42,7 @@ The budget policy to be applied to the serving endpoint. :param config: :class:`EndpointCoreConfigInput` (optional) The core config of the serving endpoint. + :param description: str (optional) :param rate_limits: List[:class:`RateLimit`] (optional) Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. @@ -55,7 +56,7 @@ See :method:wait_get_serving_endpoint_not_updating for more details. - .. 
py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed + .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], description: Optional[str], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed .. py:method:: create_provisioned_throughput_endpoint(name: str, config: PtEndpointCoreConfig [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst index 75f90464b..d435a3575 100644 --- a/docs/workspace/settings/default_namespace.rst +++ b/docs/workspace/settings/default_namespace.rst @@ -58,13 +58,6 @@ :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` - This represents the setting configuration for the default namespace in the Databricks workspace. - Setting the default catalog for the workspace determines the catalog that is used when queries do - not reference a fully qualified 3 level name. For example, if the default catalog is set to - 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - applies when using Unity Catalog-enabled compute. :param field_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst index dd51b8d75..a66fe1afb 100644 --- a/docs/workspace/settings/ip_access_lists.rst +++ b/docs/workspace/settings/ip_access_lists.rst @@ -63,10 +63,6 @@ :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) :returns: :class:`CreateIpAccessListResponse` @@ -181,10 +177,6 @@ :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` - Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) @@ -217,10 +209,6 @@ :param label: str (optional) Label for the IP access list. This **cannot** be empty. 
:param list_type: :class:`ListType` (optional)
-      Type of IP access list. Valid values are as follows and are case-sensitive:
-
-      * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-      range. IP addresses in the block list are excluded even if they are included in an allow list.
\ No newline at end of file
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 19f791a2a..fd81e1b24 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -38,7 +38,6 @@
     :param name: str
       The name of the Provider.
     :param authentication_type: :class:`AuthenticationType`
-      The delta sharing authentication type.
     :param comment: str (optional)
       Description about the provider.
     :param recipient_profile_str: str (optional)
diff --git a/docs/workspace/sharing/recipient_federation_policies.rst b/docs/workspace/sharing/recipient_federation_policies.rst
index 5b27b11eb..0cdcd8559 100644
--- a/docs/workspace/sharing/recipient_federation_policies.rst
+++ b/docs/workspace/sharing/recipient_federation_policies.rst
@@ -51,6 +51,7 @@
     :param recipient_name: str
       Name of the recipient. This is the name of the recipient for which the policy is being created.
     :param policy: :class:`FederationPolicy`
+      Name of the policy. This is the name of the policy to be created.
     :returns: :class:`FederationPolicy`
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index 572f62cbf..2f921319c 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -42,7 +42,6 @@
     :param name: str
       Name of Recipient.
     :param authentication_type: :class:`AuthenticationType`
-      The delta sharing authentication type.
     :param comment: str (optional)
       Description about the recipient.
     :param data_recipient_global_metastore_id: str (optional)
diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst
index d6ce2bdcf..aa1e61b75 100644
--- a/docs/workspace/sql/dashboard_widgets.rst
+++ b/docs/workspace/sql/dashboard_widgets.rst
@@ -9,7 +9,7 @@
     .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
-        Add widget to a dashboard
+        Adds a widget to a dashboard
         :param dashboard_id: str
           Dashboard ID returned by :method:dashboards/create.
@@ -27,7 +27,7 @@
     .. py:method:: delete(id: str)
-        Remove widget
+        Removes a widget from a dashboard
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
@@ -37,7 +37,7 @@
     .. py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
-        Update existing widget
+        Updates an existing widget
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst
index 340b606d3..e2849976f 100644
--- a/docs/workspace/sql/dashboards.rst
+++ b/docs/workspace/sql/dashboards.rst
@@ -28,7 +28,9 @@
             # cleanup
             w.dashboards.delete(dashboard_id=created.id)
-        Create a dashboard object.
+        Creates a new dashboard object. Only the name parameter is required in the POST request JSON body.
+        Other fields can be included when duplicating dashboards with this API. Databricks does not recommend
+        designing dashboards exclusively using this API.
         :param name: str
           The title of this dashboard that appears in list views and at the top of the dashboard page.
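A minimal end-to-end sketch of the dashboard and widget calls documented above (the dashboard name and widget text are placeholder values, and the widget options are left empty for brevity):

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Only `name` is required when creating a legacy SQL dashboard.
    dashboard = w.dashboards.create(name="sdk-widget-example")

    # Attach a simple text widget to the new dashboard.
    widget = w.dashboard_widgets.create(
        dashboard_id=dashboard.id,
        options=sql.WidgetOptions(),
        width=1,
        text="Hello from the SDK",
    )

    # cleanup
    w.dashboard_widgets.delete(id=widget.id)
    w.dashboards.delete(dashboard_id=dashboard.id)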
diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst index bf710ee89..56ebe9dfa 100644 --- a/docs/workspace/sql/query_visualizations_legacy.rst +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -12,7 +12,7 @@ [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization + .. py:method:: create(options: Any, query_id: str, type: str [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization Creates visualization in the query. @@ -21,13 +21,13 @@ [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param options: Any + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. :param query_id: str The identifier returned by :method:queries/create :param type: str The type of visualization: chart, table, pivot table, and so on. - :param options: Any - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. :param description: str (optional) A short description of this visualization. This is not displayed in the UI. :param name: str (optional) @@ -46,7 +46,7 @@ [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str - Widget ID returned by :method:queryvizualisations/create + Widget ID returned by :method:queryvisualizations/create diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 51ab7c086..94911bc1e 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -82,15 +82,12 @@ Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) - Configurations whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and - also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. @@ -194,15 +191,12 @@ Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) - Configurations whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and - also set the field `enable_serverless_compute` to `true`. :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. 
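For the warehouse `create` and `edit` calls above, serverless compute requires the `PRO` warehouse type together with `enable_serverless_compute=True` (as the removed field description noted). A minimal sketch, with placeholder name and sizing:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # A serverless warehouse must use the PRO type and enable serverless compute.
    created = w.warehouses.create(
        name="sdk-example-warehouse",
        cluster_size="2X-Small",
        max_num_clusters=1,
        auto_stop_mins=10,
        warehouse_type=sql.CreateWarehouseRequestWarehouseType.PRO,
        enable_serverless_compute=True,
    ).result()

    # cleanup
    w.warehouses.delete(id=created.id)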
diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst index 398f86147..ec8efd3c1 100644 --- a/docs/workspace/vectorsearch/vector_search_indexes.rst +++ b/docs/workspace/vectorsearch/vector_search_indexes.rst @@ -23,10 +23,6 @@ :param primary_key: str Primary key of the index :param index_type: :class:`VectorIndexType` - There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a - source Delta Table, automatically and incrementally updating the index as the underlying data in the - Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and - metadata through our REST and SDK APIs. With this model, the user manages index updates. :param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional) Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)