diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ad1e20e88..19086887a 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -27cebd58ae24e19c95c675db3a93b6046abaca2a \ No newline at end of file +a1c8a01392c7cfef8becc9e9e3eb8236cdcbdfbd \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index da08fc0c6..15a1650ca 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,18 @@ ### Internal Changes ### API Changes +* Add `create_space()` and `update_space()` methods for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service. +* Add `retrieve_user_visible_metrics()` method for [w.vector_search_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vectorsearch/vector_search_endpoints.html) workspace-level service. +* Add `include_serialized_space` field for `databricks.sdk.service.dashboards.GenieGetSpaceRequest`. +* Add `purpose` field for `databricks.sdk.service.dashboards.TextAttachment`. +* Add `budget_policy_id` field for `databricks.sdk.service.database.NewPipelineSpec`. +* Add `connection_parameters` field for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`. +* Add `ingest_from_uc_foreign_catalog` field for `databricks.sdk.service.pipelines.IngestionPipelineDefinition`. +* Add `rewind_spec` field for `databricks.sdk.service.pipelines.StartUpdate`. +* Add `type_text` field for `databricks.sdk.service.vectorsearch.ColumnInfo`. +* Add `foreign_catalog` enum value for `databricks.sdk.service.pipelines.IngestionSourceType`. +* Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState`. +* Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`. +* Change `destinations` field for `databricks.sdk.service.catalog.AccessRequestDestinations` to no longer be required. +* [Breaking] Change `destinations` field for `databricks.sdk.service.catalog.AccessRequestDestinations` to no longer be required. +* [Breaking] Change `online_store_config` field for `databricks.sdk.service.ml.MaterializedFeature` to type `databricks.sdk.service.ml.OnlineStoreConfig` dataclass. 
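A minimal usage sketch of the new Genie space methods listed in the changelog above, assuming a configured `WorkspaceClient`; the warehouse ID, parent path, and serialized export are placeholders (the exact method signatures appear later in this diff):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create a Genie space from a previously exported serialized payload.
space = w.genie.create_space(
    warehouse_id="<warehouse-id>",
    serialized_space="<serialized-space-export>",
    title="Sales analytics space",
    parent_path="/Workspace/Users/someone@example.com",  # placeholder folder
)

# Later, rename the space; fields that are not passed are left unchanged.
updated = w.genie.update_space(space.space_id, title="Sales analytics space (v2)")
print(updated.title)
```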
\ No newline at end of file diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 148bfdc43..e692d92f5 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -846,7 +846,7 @@ def resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI: @property def rfa(self) -> pkg_catalog.RfaAPI: - """Request for Access enables customers to request access to and manage access request destinations for Unity Catalog securables.""" + """Request for Access enables users to request access for Unity Catalog securables.""" return self._rfa @property diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index f22edec9b..c6d6c2dbc 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -1474,7 +1474,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: class ComputeSize(Enum): LARGE = "LARGE" - LIQUID = "LIQUID" MEDIUM = "MEDIUM" diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index a99c5405a..094b3ecd0 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -23,9 +23,6 @@ @dataclass class AccessRequestDestinations: - destinations: List[NotificationDestination] - """The access request destinations for the securable.""" - securable: Securable """The securable for which the access request destinations are being retrieved.""" @@ -33,6 +30,9 @@ class AccessRequestDestinations: """Indicates whether any destinations are hidden from the caller due to a lack of permissions. This value is true if the caller does not have permission to see all destinations.""" + destinations: Optional[List[NotificationDestination]] = None + """The access request destinations for the securable.""" + def as_dict(self) -> dict: """Serializes the AccessRequestDestinations into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1740,7 +1740,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 47""" + """Next Id: 48""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -1750,7 +1750,6 @@ class ConnectionType(Enum): HTTP = "HTTP" MYSQL = "MYSQL" ORACLE = "ORACLE" - PALANTIR = "PALANTIR" POSTGRESQL = "POSTGRESQL" POWER_BI = "POWER_BI" REDSHIFT = "REDSHIFT" @@ -8745,7 +8744,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable: class SecurableKind(Enum): - """Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271""" + """Latest kind: CONNECTION_SLACK_OAUTH_U2M_MAPPING = 272; Next id:273""" TABLE_DB_STORAGE = "TABLE_DB_STORAGE" TABLE_DELTA = "TABLE_DELTA" @@ -8787,7 +8786,6 @@ class SecurableKind(Enum): TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" - TABLE_FOREIGN_PALANTIR = "TABLE_FOREIGN_PALANTIR" TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" @@ -14650,12 +14648,10 @@ def list_quotas( class RfaAPI: - """Request for Access enables customers to request access to and manage access request destinations for Unity - Catalog securables. + """Request for Access enables users to request access for Unity Catalog securables. - These APIs provide a standardized way to update, get, and request to access request destinations. 
- Fine-grained authorization ensures that only users with appropriate permissions can manage access request - destinations.""" + These APIs provide a standardized way for securable owners (or users with MANAGE privileges) to manage + access request destinations.""" def __init__(self, api_client): self._api = api_client diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 64e6f4b0a..473c7cca1 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -171,7 +171,7 @@ class AwsAttributes: be of a form like "us-west-2a". The provided availability zone must be in the same region as the Databricks deployment. For example, "us-west-2a" is not a valid zone id if the Databricks deployment resides in the "us-east-1" region. This is an optional field at cluster creation, and - if not specified, a default zone will be used. If the zone specified is "auto", will try to + if not specified, the zone "auto" will be used. If the zone specified is "auto", will try to place cluster in a zone with high availability, and will retry placement in a different AZ if there is not enough capacity. @@ -7112,7 +7112,6 @@ class TerminationReasonCode(Enum): DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" - DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" DRIVER_EVICTION = "DRIVER_EVICTION" DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" @@ -7191,8 +7190,6 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" - NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" - NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -7205,7 +7202,6 @@ class TerminationReasonCode(Enum): SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" - SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 5bf772f27..07a3de8c0 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1161,7 +1161,6 @@ class MessageErrorType(Enum): INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION = "INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION" INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" - INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" INVALID_CHAT_COMPLETION_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" INVALID_COMPLETION_REQUEST_EXCEPTION = "INVALID_COMPLETION_REQUEST_EXCEPTION" INVALID_FUNCTION_CALL_EXCEPTION = "INVALID_FUNCTION_CALL_EXCEPTION" @@ -1639,6 +1638,9 @@ class 
TextAttachment: id: Optional[str] = None + purpose: Optional[TextAttachmentPurpose] = None + """Purpose/intent of this text attachment""" + def as_dict(self) -> dict: """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1646,6 +1648,8 @@ def as_dict(self) -> dict: body["content"] = self.content if self.id is not None: body["id"] = self.id + if self.purpose is not None: + body["purpose"] = self.purpose.value return body def as_shallow_dict(self) -> dict: @@ -1655,12 +1659,22 @@ def as_shallow_dict(self) -> dict: body["content"] = self.content if self.id is not None: body["id"] = self.id + if self.purpose is not None: + body["purpose"] = self.purpose return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" - return cls(content=d.get("content", None), id=d.get("id", None)) + return cls( + content=d.get("content", None), id=d.get("id", None), purpose=_enum(d, "purpose", TextAttachmentPurpose) + ) + + +class TextAttachmentPurpose(Enum): + """Purpose/intent of a text attachment""" + + FOLLOW_UP_QUESTION = "FOLLOW_UP_QUESTION" @dataclass @@ -1787,6 +1801,50 @@ def create_message_and_wait( timeout=timeout ) + def create_space( + self, + warehouse_id: str, + serialized_space: str, + *, + description: Optional[str] = None, + parent_path: Optional[str] = None, + title: Optional[str] = None, + ) -> GenieSpace: + """Creates a Genie space from a serialized payload. + + :param warehouse_id: str + Warehouse to associate with the new space + :param serialized_space: str + Serialized export model for the space contents + :param description: str (optional) + Optional description + :param parent_path: str (optional) + Parent folder path where the space will be registered + :param title: str (optional) + Optional title override + + :returns: :class:`GenieSpace` + """ + + body = {} + if description is not None: + body["description"] = description + if parent_path is not None: + body["parent_path"] = parent_path + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/genie/spaces", body=body, headers=headers) + return GenieSpace.from_dict(res) + def delete_conversation(self, space_id: str, conversation_id: str): """Delete a conversation. @@ -1992,20 +2050,26 @@ def get_message_query_result_by_attachment( ) return GenieGetMessageQueryResultResponse.from_dict(res) - def get_space(self, space_id: str) -> GenieSpace: + def get_space(self, space_id: str, *, include_serialized_space: Optional[bool] = None) -> GenieSpace: """Get details of a Genie Space. :param space_id: str The ID associated with the Genie space + :param include_serialized_space: bool (optional) + Whether to include the serialized space export in the response. Requires at least CAN EDIT + permission on the space. 
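The `include_serialized_space` flag shown above makes it possible to export an existing space and seed a new one from it. A hedged sketch: it assumes the returned `GenieSpace` exposes the export and warehouse as `serialized_space` and `warehouse_id` attributes, and requires at least CAN EDIT on the source space:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fetch the space together with its serialized export (requires CAN EDIT).
source = w.genie.get_space("<source-space-id>", include_serialized_space=True)

# Clone it into a new space backed by the same warehouse.
clone = w.genie.create_space(
    warehouse_id=source.warehouse_id,          # assumes GenieSpace exposes warehouse_id
    serialized_space=source.serialized_space,  # assumed field name for the export payload
    title=f"{source.title} (copy)",
)
```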
:returns: :class:`GenieSpace` """ + query = {} + if include_serialized_space is not None: + query["include_serialized_space"] = include_serialized_space headers = { "Accept": "application/json", } - res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", query=query, headers=headers) return GenieSpace.from_dict(res) def list_conversation_messages( @@ -2184,6 +2248,48 @@ def trash_space(self, space_id: str): self._api.do("DELETE", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + def update_space( + self, + space_id: str, + *, + description: Optional[str] = None, + serialized_space: Optional[str] = None, + title: Optional[str] = None, + warehouse_id: Optional[str] = None, + ) -> GenieSpace: + """Updates a Genie space with a serialized payload. + + :param space_id: str + Genie space ID + :param description: str (optional) + Optional description + :param serialized_space: str (optional) + Serialized export model for the space contents (full replacement) + :param title: str (optional) + Optional title override + :param warehouse_id: str (optional) + Optional warehouse override + + :returns: :class:`GenieSpace` + """ + + body = {} + if description is not None: + body["description"] = description + if serialized_space is not None: + body["serialized_space"] = serialized_space + if title is not None: + body["title"] = title + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/genie/spaces/{space_id}", body=body, headers=headers) + return GenieSpace.from_dict(res) + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index b0bbbd7cb..21d6ea9c9 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -824,6 +824,9 @@ class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other fields of pipeline are still inferred by table def internally""" + budget_policy_id: Optional[str] = None + """Budget policy to set on the newly created pipeline.""" + storage_catalog: Optional[str] = None """This field needs to be specified if the destination catalog is a managed postgres catalog. 
@@ -839,6 +842,8 @@ class NewPipelineSpec: def as_dict(self) -> dict: """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -848,6 +853,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" body = {} + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id if self.storage_catalog is not None: body["storage_catalog"] = self.storage_catalog if self.storage_schema is not None: @@ -857,7 +864,11 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: """Deserializes the NewPipelineSpec from a dictionary.""" - return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None)) + return cls( + budget_policy_id=d.get("budget_policy_id", None), + storage_catalog=d.get("storage_catalog", None), + storage_schema=d.get("storage_schema", None), + ) class ProvisioningInfoState(Enum): diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index e84121f29..4c0d13ab6 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -2525,8 +2525,9 @@ def list( start_index: Optional[int] = None, ) -> Iterator[AccountGroup]: """Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this endpoint - will not return members. Instead, members should be retrieved by iterating through `Get group - details`. + will no longer return members. Instead, members should be retrieved by iterating through `Get group + details`. Existing accounts that rely on this attribute will not be impacted and will continue + receiving member data as before. :param attributes: str (optional) Comma-separated list of attributes to return in response. diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 1ca8e631c..83b35a218 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -2915,10 +2915,10 @@ class JobSettings: environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by serverless tasks - of this job. An environment is required to be present for serverless tasks. For serverless - notebook tasks, the environment is accessible in the notebook environment panel. For other - serverless tasks, the task environment is required to be specified using environment_key in the - task settings.""" + of this job. For serverless notebook tasks, if the environment_key is not specified, the + notebook environment will be used if present. If a jobs environment is specified, it will + override the notebook environment. For other serverless tasks, the task environment is required + to be specified using environment_key in the task settings.""" format: Optional[Format] = None """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. @@ -8246,7 +8246,7 @@ class JobsAPI: scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. 
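To make the `database.NewPipelineSpec` change above concrete: the new `budget_policy_id` travels alongside the existing storage fields when a pipeline is created for a synced table. A small sketch that only constructs the spec and shows the request body it serializes to (how the spec is attached to a synced-table create call is unchanged and not shown here):

```python
from databricks.sdk.service.database import NewPipelineSpec

spec = NewPipelineSpec(
    budget_policy_id="<budget-policy-id>",  # budget policy applied to the newly created pipeline
    storage_catalog="main",
    storage_schema="synced_tables",
)

# Only non-None fields are serialized; the new field appears next to the storage fields.
print(spec.as_dict())
# {'budget_policy_id': '<budget-policy-id>', 'storage_catalog': 'main', 'storage_schema': 'synced_tables'}
```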
You can run your jobs immediately or periodically through an easy-to-use - scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or + scheduling system. You can implement job tasks using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java applications. You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in @@ -8397,9 +8397,10 @@ def create( as when this job is deleted. :param environments: List[:class:`JobEnvironment`] (optional) A list of task execution environment specifications that can be referenced by serverless tasks of - this job. An environment is required to be present for serverless tasks. For serverless notebook - tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, - the task environment is required to be specified using environment_key in the task settings. + this job. For serverless notebook tasks, if the environment_key is not specified, the notebook + environment will be used if present. If a jobs environment is specified, it will override the + notebook environment. For other serverless tasks, the task environment is required to be specified + using environment_key in the task settings. :param format: :class:`Format` (optional) Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 5e5ccc267..84d761c36 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -3,7 +3,6 @@ from __future__ import annotations import logging -import uuid from dataclasses import dataclass from enum import Enum from typing import Any, Dict, Iterator, List, Optional @@ -2165,6 +2164,8 @@ class PersonalizationRequest: recipient_type: Optional[DeltaSharingRecipientType] = None share: Optional[ShareInfo] = None + """Share information is required for data listings but should be empty/ignored for non-data + listings (MCP and App).""" status: Optional[PersonalizationRequestStatus] = None @@ -4096,8 +4097,6 @@ def update( :returns: :class:`UpdatePersonalizationRequestResponse` """ - if request_id is None or request_id == "": - request_id = str(uuid.uuid4()) body = {} if reason is not None: body["reason"] = reason diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 94fd823ca..ca0684c87 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1385,7 +1385,11 @@ class Feature: """The filter condition applied to the source data before aggregation.""" lineage_context: Optional[LineageContext] = None - """Lineage context information for this feature.""" + """WARNING: This field is primarily intended for internal use by Databricks systems and is + automatically populated when features are created through Databricks notebooks or jobs. Users + should not manually set this field as incorrect values may lead to inaccurate lineage tracking + or unexpected behavior. 
This field will be set by feature-engineering client and should be left + unset by SDK and terraform users.""" def as_dict(self) -> dict: """Serializes the Feature into a dictionary suitable for use as a JSON request body.""" @@ -3154,7 +3158,7 @@ class MaterializedFeature: offline_store_config: Optional[OfflineStoreConfig] = None - online_store_config: Optional[OnlineStore] = None + online_store_config: Optional[OnlineStoreConfig] = None pipeline_schedule_state: Optional[MaterializedFeaturePipelineScheduleState] = None """The schedule state of the materialization pipeline.""" @@ -3209,7 +3213,7 @@ def from_dict(cls, d: Dict[str, Any]) -> MaterializedFeature: last_materialization_time=d.get("last_materialization_time", None), materialized_feature_id=d.get("materialized_feature_id", None), offline_store_config=_from_dict(d, "offline_store_config", OfflineStoreConfig), - online_store_config=_from_dict(d, "online_store_config", OnlineStore), + online_store_config=_from_dict(d, "online_store_config", OnlineStoreConfig), pipeline_schedule_state=_enum(d, "pipeline_schedule_state", MaterializedFeaturePipelineScheduleState), table_name=d.get("table_name", None), ) @@ -4013,6 +4017,60 @@ def from_dict(cls, d: Dict[str, Any]) -> OnlineStore: ) +@dataclass +class OnlineStoreConfig: + """Configuration for online store destination.""" + + catalog_name: str + """The Unity Catalog catalog name. This name is also used as the Lakebase logical database name.""" + + schema_name: str + """The Unity Catalog schema name.""" + + table_name_prefix: str + """Prefix for Unity Catalog table name. The materialized feature will be stored in a Lakebase table + with this prefix and a generated postfix.""" + + online_store_name: str + """The name of the target online store.""" + + def as_dict(self) -> dict: + """Serializes the OnlineStoreConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.online_store_name is not None: + body["online_store_name"] = self.online_store_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OnlineStoreConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.online_store_name is not None: + body["online_store_name"] = self.online_store_name + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OnlineStoreConfig: + """Deserializes the OnlineStoreConfig from a dictionary.""" + return cls( + catalog_name=d.get("catalog_name", None), + online_store_name=d.get("online_store_name", None), + schema_name=d.get("schema_name", None), + table_name_prefix=d.get("table_name_prefix", None), + ) + + class OnlineStoreState(Enum): AVAILABLE = "AVAILABLE" diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 9ab410419..d38088dda 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -22,6 +22,33 @@ # all definitions in this file are in alphabetical order +@dataclass +class ConnectionParameters: + source_catalog: Optional[str] = None + 
"""Source catalog for initial connection. This is necessary for schema exploration in some database + systems like Oracle, and optional but nice-to-have in some other database systems like Postgres. + For Oracle databases, this maps to a service name.""" + + def as_dict(self) -> dict: + """Serializes the ConnectionParameters into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ConnectionParameters into a shallow dictionary of its immediate attributes.""" + body = {} + if self.source_catalog is not None: + body["source_catalog"] = self.source_catalog + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ConnectionParameters: + """Deserializes the ConnectionParameters from a dictionary.""" + return cls(source_catalog=d.get("source_catalog", None)) + + @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -554,6 +581,9 @@ class IngestionGatewayPipelineDefinition: """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" + connection_parameters: Optional[ConnectionParameters] = None + """Optional, Internal. Parameters required to establish an initial connection with the source.""" + gateway_storage_name: Optional[str] = None """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Spark Declarative Pipelines @@ -566,6 +596,8 @@ def as_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters.as_dict() if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -581,6 +613,8 @@ def as_shallow_dict(self) -> dict: body["connection_id"] = self.connection_id if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.connection_parameters: + body["connection_parameters"] = self.connection_parameters if self.gateway_storage_catalog is not None: body["gateway_storage_catalog"] = self.gateway_storage_catalog if self.gateway_storage_name is not None: @@ -595,6 +629,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionGatewayPipelineDefinition: return cls( connection_id=d.get("connection_id", None), connection_name=d.get("connection_name", None), + connection_parameters=_from_dict(d, "connection_parameters", ConnectionParameters), gateway_storage_catalog=d.get("gateway_storage_catalog", None), gateway_storage_name=d.get("gateway_storage_name", None), gateway_storage_schema=d.get("gateway_storage_schema", None), @@ -607,6 +642,11 @@ class IngestionPipelineDefinition: """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" + ingest_from_uc_foreign_catalog: Optional[bool] = None + """Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs directly + without the need to specify a UC connection or ingestion gateway. 
The `source_catalog` fields in + objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.""" + ingestion_gateway_id: Optional[str] = None """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.""" @@ -634,6 +674,8 @@ def as_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.netsuite_jar_path is not None: @@ -653,6 +695,8 @@ def as_shallow_dict(self) -> dict: body = {} if self.connection_name is not None: body["connection_name"] = self.connection_name + if self.ingest_from_uc_foreign_catalog is not None: + body["ingest_from_uc_foreign_catalog"] = self.ingest_from_uc_foreign_catalog if self.ingestion_gateway_id is not None: body["ingestion_gateway_id"] = self.ingestion_gateway_id if self.netsuite_jar_path is not None: @@ -672,6 +716,7 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" return cls( connection_name=d.get("connection_name", None), + ingest_from_uc_foreign_catalog=d.get("ingest_from_uc_foreign_catalog", None), ingestion_gateway_id=d.get("ingestion_gateway_id", None), netsuite_jar_path=d.get("netsuite_jar_path", None), objects=_repeated_dict(d, "objects", IngestionConfig), @@ -828,31 +873,20 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionWorkdayRepor class IngestionSourceType(Enum): BIGQUERY = "BIGQUERY" - CONFLUENCE = "CONFLUENCE" DYNAMICS365 = "DYNAMICS365" FOREIGN_CATALOG = "FOREIGN_CATALOG" GA4_RAW_DATA = "GA4_RAW_DATA" - GOOGLE_ADS = "GOOGLE_ADS" - GUIDEWIRE = "GUIDEWIRE" - HUBSPOT = "HUBSPOT" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" - META_MARKETING = "META_MARKETING" MYSQL = "MYSQL" NETSUITE = "NETSUITE" ORACLE = "ORACLE" POSTGRESQL = "POSTGRESQL" - REDSHIFT = "REDSHIFT" SALESFORCE = "SALESFORCE" - SALESFORCE_MARKETING_CLOUD = "SALESFORCE_MARKETING_CLOUD" SERVICENOW = "SERVICENOW" SHAREPOINT = "SHAREPOINT" - SQLDW = "SQLDW" SQLSERVER = "SQLSERVER" TERADATA = "TERADATA" - TIKTOK_ADS = "TIKTOK_ADS" - WORKDAY_HCM = "WORKDAY_HCM" WORKDAY_RAAS = "WORKDAY_RAAS" - ZENDESK = "ZENDESK" @dataclass @@ -2526,6 +2560,97 @@ def from_dict(cls, d: Dict[str, Any]) -> RestartWindow: ) +@dataclass +class RewindDatasetSpec: + """Configuration for rewinding a specific dataset.""" + + cascade: Optional[bool] = None + """Whether to cascade the rewind to dependent datasets. 
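A sketch of the foreign-catalog ingestion flag described above. The `IngestionConfig`/`SchemaSpec` shapes and the `ingestion_definition` argument are assumed to match the existing ingestion pipeline types; only `ingest_from_uc_foreign_catalog` and the reinterpretation of `source_catalog` are new here:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (IngestionConfig,
                                              IngestionPipelineDefinition,
                                              SchemaSpec)

w = WorkspaceClient()

ingestion_definition = IngestionPipelineDefinition(
    # No connection_name or ingestion_gateway_id: tables come straight from a UC foreign catalog.
    ingest_from_uc_foreign_catalog=True,
    objects=[
        IngestionConfig(
            schema=SchemaSpec(
                source_catalog="postgres_foreign_catalog",  # interpreted as the UC foreign catalog
                source_schema="public",
                destination_catalog="main",
                destination_schema="bronze",
            )
        )
    ],
)

created = w.pipelines.create(
    name="foreign-catalog-ingestion",
    ingestion_definition=ingestion_definition,  # assumed parameter name on pipelines.create
)
```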
Must be specified.""" + + identifier: Optional[str] = None + """The identifier of the dataset (e.g., "main.foo.tbl1").""" + + reset_checkpoints: Optional[bool] = None + """Whether to reset checkpoints for this dataset.""" + + def as_dict(self) -> dict: + """Serializes the RewindDatasetSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cascade is not None: + body["cascade"] = self.cascade + if self.identifier is not None: + body["identifier"] = self.identifier + if self.reset_checkpoints is not None: + body["reset_checkpoints"] = self.reset_checkpoints + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RewindDatasetSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cascade is not None: + body["cascade"] = self.cascade + if self.identifier is not None: + body["identifier"] = self.identifier + if self.reset_checkpoints is not None: + body["reset_checkpoints"] = self.reset_checkpoints + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RewindDatasetSpec: + """Deserializes the RewindDatasetSpec from a dictionary.""" + return cls( + cascade=d.get("cascade", None), + identifier=d.get("identifier", None), + reset_checkpoints=d.get("reset_checkpoints", None), + ) + + +@dataclass +class RewindSpec: + """Information about a rewind being requested for this pipeline or some of the datasets in it.""" + + datasets: Optional[List[RewindDatasetSpec]] = None + """List of datasets to rewind with specific configuration for each. When not specified, all + datasets will be rewound with cascade = true and reset_checkpoints = true.""" + + dry_run: Optional[bool] = None + """If true, this is a dry run and we should emit the RewindSummary but not perform the rewind.""" + + rewind_timestamp: Optional[str] = None + """The base timestamp to rewind to. Must be specified.""" + + def as_dict(self) -> dict: + """Serializes the RewindSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.datasets: + body["datasets"] = [v.as_dict() for v in self.datasets] + if self.dry_run is not None: + body["dry_run"] = self.dry_run + if self.rewind_timestamp is not None: + body["rewind_timestamp"] = self.rewind_timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RewindSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.datasets: + body["datasets"] = self.datasets + if self.dry_run is not None: + body["dry_run"] = self.dry_run + if self.rewind_timestamp is not None: + body["rewind_timestamp"] = self.rewind_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RewindSpec: + """Deserializes the RewindSpec from a dictionary.""" + return cls( + datasets=_repeated_dict(d, "datasets", RewindDatasetSpec), + dry_run=d.get("dry_run", None), + rewind_timestamp=d.get("rewind_timestamp", None), + ) + + @dataclass class RunAs: """Write-only setting, available only in Create/Update calls. Specifies the user or service @@ -3483,8 +3608,8 @@ def create( return CreatePipelineResponse.from_dict(res) def delete(self, pipeline_id: str): - """Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and - its tables. You cannot undo this action. + """Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade to all + pipeline tables. Please reach out to Databricks support for assistance to undo this action. 
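The rewind types above plug into `start_update`, whose new `rewind_spec` parameter appears a little further down in this diff. A hedged sketch of a dry-run rewind of a single table; the timestamp format is assumed to be RFC 3339:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import RewindDatasetSpec, RewindSpec

w = WorkspaceClient()

rewind_spec = RewindSpec(
    datasets=[
        RewindDatasetSpec(
            identifier="main.foo.tbl1",
            cascade=True,            # also rewind dependent datasets
            reset_checkpoints=True,  # reset streaming checkpoints for this dataset
        )
    ],
    rewind_timestamp="2025-01-01T00:00:00Z",  # assumed RFC 3339 format
    dry_run=True,  # emit the rewind summary without performing the rewind
)

update = w.pipelines.start_update("<pipeline-id>", rewind_spec=rewind_spec)
print(update.update_id)
```

Per the docstring above, omitting `datasets` rewinds every dataset with `cascade` and `reset_checkpoints` defaulting to true.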
:param pipeline_id: str @@ -3741,6 +3866,7 @@ def start_update( full_refresh: Optional[bool] = None, full_refresh_selection: Optional[List[str]] = None, refresh_selection: Optional[List[str]] = None, + rewind_spec: Optional[RewindSpec] = None, validate_only: Optional[bool] = None, ) -> StartUpdateResponse: """Starts a new update for the pipeline. If there is already an active update for the pipeline, the @@ -3758,6 +3884,8 @@ def start_update( A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means that the states of the table will be reset before the refresh. + :param rewind_spec: :class:`RewindSpec` (optional) + The information about the requested rewind operation. If specified this is a rewind mode update. :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. @@ -3774,6 +3902,8 @@ def start_update( body["full_refresh_selection"] = [v for v in full_refresh_selection] if refresh_selection is not None: body["refresh_selection"] = [v for v in refresh_selection] + if rewind_spec is not None: + body["rewind_spec"] = rewind_spec.as_dict() if validate_only is not None: body["validate_only"] = validate_only headers = { diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index c6126a23e..99996e980 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1180,6 +1180,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CustomerFacingNetworkConnectivityConfig class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState(Enum): + CREATE_FAILED = "CREATE_FAILED" + CREATING = "CREATING" DISCONNECTED = "DISCONNECTED" ESTABLISHED = "ESTABLISHED" EXPIRED = "EXPIRED" @@ -3825,6 +3827,8 @@ def from_dict(cls, d: Dict[str, Any]) -> NccAzurePrivateEndpointRule: class NccAzurePrivateEndpointRuleConnectionState(Enum): + CREATE_FAILED = "CREATE_FAILED" + CREATING = "CREATING" DISCONNECTED = "DISCONNECTED" ESTABLISHED = "ESTABLISHED" EXPIRED = "EXPIRED" @@ -4165,8 +4169,6 @@ def from_dict(cls, d: Dict[str, Any]) -> NccPrivateEndpointRule: class NccPrivateEndpointRulePrivateLinkConnectionState(Enum): - CREATE_FAILED = "CREATE_FAILED" - CREATING = "CREATING" DISCONNECTED = "DISCONNECTED" ESTABLISHED = "ESTABLISHED" EXPIRED = "EXPIRED" diff --git a/databricks/sdk/service/settingsv2.py b/databricks/sdk/service/settingsv2.py index a529d7a5a..fabd932ba 100755 --- a/databricks/sdk/service/settingsv2.py +++ b/databricks/sdk/service/settingsv2.py @@ -486,39 +486,72 @@ class RestrictWorkspaceAdminsMessageStatus(Enum): @dataclass class Setting: aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None + """Setting value for aibi_dashboard_embedding_access_policy setting. This is the setting value set + by consumers, check effective_aibi_dashboard_embedding_access_policy for final setting value.""" aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None + """Setting value for aibi_dashboard_embedding_approved_domains setting. This is the setting value + set by consumers, check effective_aibi_dashboard_embedding_approved_domains for final setting + value.""" automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None + """Setting value for automatic_cluster_update_workspace setting. 
This is the setting value set by + consumers, check effective_automatic_cluster_update_workspace for final setting value.""" boolean_val: Optional[BooleanMessage] = None + """Setting value for boolean type setting. This is the setting value set by consumers, check + effective_boolean_val for final setting value.""" effective_aibi_dashboard_embedding_access_policy: Optional[AibiDashboardEmbeddingAccessPolicy] = None + """Effective setting value for aibi_dashboard_embedding_access_policy setting. This is the final + effective value of setting. To set a value use aibi_dashboard_embedding_access_policy.""" effective_aibi_dashboard_embedding_approved_domains: Optional[AibiDashboardEmbeddingApprovedDomains] = None + """Effective setting value for aibi_dashboard_embedding_approved_domains setting. This is the final + effective value of setting. To set a value use aibi_dashboard_embedding_approved_domains.""" effective_automatic_cluster_update_workspace: Optional[ClusterAutoRestartMessage] = None + """Effective setting value for automatic_cluster_update_workspace setting. This is the final + effective value of setting. To set a value use automatic_cluster_update_workspace.""" effective_boolean_val: Optional[BooleanMessage] = None + """Effective setting value for boolean type setting. This is the final effective value of setting. + To set a value use boolean_val.""" effective_integer_val: Optional[IntegerMessage] = None + """Effective setting value for integer type setting. This is the final effective value of setting. + To set a value use integer_val.""" effective_personal_compute: Optional[PersonalComputeMessage] = None + """Effective setting value for personal_compute setting. This is the final effective value of + setting. To set a value use personal_compute.""" effective_restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None + """Effective setting value for restrict_workspace_admins setting. This is the final effective value + of setting. To set a value use restrict_workspace_admins.""" effective_string_val: Optional[StringMessage] = None + """Effective setting value for string type setting. This is the final effective value of setting. + To set a value use string_val.""" integer_val: Optional[IntegerMessage] = None + """Setting value for integer type setting. This is the setting value set by consumers, check + effective_integer_val for final setting value.""" name: Optional[str] = None """Name of the setting.""" personal_compute: Optional[PersonalComputeMessage] = None + """Setting value for personal_compute setting. This is the setting value set by consumers, check + effective_personal_compute for final setting value.""" restrict_workspace_admins: Optional[RestrictWorkspaceAdminsMessage] = None + """Setting value for restrict_workspace_admins setting. This is the setting value set by consumers, + check effective_restrict_workspace_admins for final setting value.""" string_val: Optional[StringMessage] = None + """Setting value for string type setting. This is the setting value set by consumers, check + effective_string_val for final setting value.""" def as_dict(self) -> dict: """Serializes the Setting into a dictionary suitable for use as a JSON request body.""" @@ -658,7 +691,8 @@ class SettingsMetadata: """Name of the setting.""" type: Optional[str] = None - """Type of the setting. To set this setting, the value sent must match this type.""" + """Sample message depicting the type of the setting. 
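To make the settingsv2 docstrings above concrete (the corresponding patch methods appear just below), here is a hedged sketch of patching a boolean-typed workspace setting; it assumes the workspace-level service is exposed as `w.settings_v2` and that `BooleanMessage` carries its payload in a `value` field:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settingsv2 import BooleanMessage, Setting

w = WorkspaceClient()

name = "<setting-name>"  # pick a name returned by list_workspace_settings_metadata()

# The metadata's `type` field tells you which Setting field to populate; here a boolean setting.
patched = w.settings_v2.patch_public_workspace_setting(
    name=name,
    setting=Setting(name=name, boolean_val=BooleanMessage(value=True)),  # assumed BooleanMessage field
)

# Read back the final effective value rather than the raw consumer-set value.
print(patched.effective_boolean_val)
```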
To set this setting, the value sent must match + this type.""" def as_dict(self) -> dict: """Serializes the SettingsMetadata into a dictionary suitable for use as a JSON request body.""" @@ -786,7 +820,9 @@ def list_account_settings_metadata( def patch_public_account_setting(self, name: str, setting: Setting) -> Setting: """Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for list of - setting available via public APIs at account level. + setting available via public APIs at account level. To determine the correct field to include in a + patch request, refer to the type field of the setting returned in the + :method:settingsv2/listaccountsettingsmetadata response. :param name: str :param setting: :class:`Setting` @@ -817,6 +853,7 @@ def get_public_workspace_setting(self, name: str) -> Setting: of setting available via public APIs. :param name: str + Name of the setting :returns: :class:`Setting` """ @@ -869,9 +906,12 @@ def list_workspace_settings_metadata( def patch_public_workspace_setting(self, name: str, setting: Setting) -> Setting: """Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for - list of setting available via public APIs at workspace level. + list of setting available via public APIs at workspace level. To determine the correct field to + include in a patch request, refer to the type field of the setting returned in the + :method:settingsv2/listworkspacesettingsmetadata response. :param name: str + Name of the setting :param setting: :class:`Setting` :returns: :class:`Setting` diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 2b839f687..403f401d3 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -950,6 +950,7 @@ class AlertV2OperandColumn: name: str aggregation: Optional[Aggregation] = None + """If not set, the behavior is equivalent to using `First row` in the UI.""" display: Optional[str] = None @@ -6302,7 +6303,6 @@ class TerminationReasonCode(Enum): DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" - DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" DRIVER_EVICTION = "DRIVER_EVICTION" DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" @@ -6381,8 +6381,6 @@ class TerminationReasonCode(Enum): NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG = "NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" - NO_ACTIVATED_K8S = "NO_ACTIVATED_K8S" - NO_ACTIVATED_K8S_TESTING_TAG = "NO_ACTIVATED_K8S_TESTING_TAG" NO_MATCHED_K8S = "NO_MATCHED_K8S" NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" @@ -6395,7 +6393,6 @@ class TerminationReasonCode(Enum): SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" - SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION = "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" diff --git a/databricks/sdk/service/vectorsearch.py 
b/databricks/sdk/service/vectorsearch.py index a0b731ffa..567447366 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -26,11 +26,16 @@ class ColumnInfo: name: Optional[str] = None """Name of the column.""" + type_text: Optional[str] = None + """Data type of the column (e.g., "string", "int", "array")""" + def as_dict(self) -> dict: """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.name is not None: body["name"] = self.name + if self.type_text is not None: + body["type_text"] = self.type_text return body def as_shallow_dict(self) -> dict: @@ -38,12 +43,14 @@ def as_shallow_dict(self) -> dict: body = {} if self.name is not None: body["name"] = self.name + if self.type_text is not None: + body["type_text"] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" - return cls(name=d.get("name", None)) + return cls(name=d.get("name", None), type_text=d.get("type_text", None)) @dataclass @@ -737,6 +744,153 @@ def from_dict(cls, d: Dict[str, Any]) -> MapStringValueEntry: return cls(key=d.get("key", None), value=_from_dict(d, "value", Value)) +@dataclass +class Metric: + """Metric specification""" + + labels: Optional[List[MetricLabel]] = None + """Metric labels""" + + name: Optional[str] = None + """Metric name""" + + percentile: Optional[float] = None + """Percentile for the metric""" + + def as_dict(self) -> dict: + """Serializes the Metric into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.labels: + body["labels"] = [v.as_dict() for v in self.labels] + if self.name is not None: + body["name"] = self.name + if self.percentile is not None: + body["percentile"] = self.percentile + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Metric into a shallow dictionary of its immediate attributes.""" + body = {} + if self.labels: + body["labels"] = self.labels + if self.name is not None: + body["name"] = self.name + if self.percentile is not None: + body["percentile"] = self.percentile + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Metric: + """Deserializes the Metric from a dictionary.""" + return cls( + labels=_repeated_dict(d, "labels", MetricLabel), + name=d.get("name", None), + percentile=d.get("percentile", None), + ) + + +@dataclass +class MetricLabel: + """Label for a metric""" + + name: Optional[str] = None + """Label name""" + + value: Optional[str] = None + """Label value""" + + def as_dict(self) -> dict: + """Serializes the MetricLabel into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MetricLabel into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MetricLabel: + """Deserializes the MetricLabel from a dictionary.""" + return cls(name=d.get("name", None), value=d.get("value", None)) + + +@dataclass +class MetricValue: + """Single metric value at a specific timestamp""" + + timestamp: Optional[int] = None + """Timestamp of the metric value (milliseconds since epoch)""" + + value: Optional[float] = None + 
"""Metric value""" + + def as_dict(self) -> dict: + """Serializes the MetricValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MetricValue into a shallow dictionary of its immediate attributes.""" + body = {} + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MetricValue: + """Deserializes the MetricValue from a dictionary.""" + return cls(timestamp=d.get("timestamp", None), value=d.get("value", None)) + + +@dataclass +class MetricValues: + """Collection of metric values for a specific metric""" + + metric: Optional[Metric] = None + """Metric specification""" + + values: Optional[List[MetricValue]] = None + """Time series of metric values""" + + def as_dict(self) -> dict: + """Serializes the MetricValues into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metric: + body["metric"] = self.metric.as_dict() + if self.values: + body["values"] = [v.as_dict() for v in self.values] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MetricValues into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metric: + body["metric"] = self.metric + if self.values: + body["values"] = self.values + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MetricValues: + """Deserializes the MetricValues from a dictionary.""" + return cls(metric=_from_dict(d, "metric", Metric), values=_repeated_dict(d, "values", MetricValue)) + + @dataclass class MiniVectorIndex: creator: Optional[str] = None @@ -998,6 +1152,44 @@ def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) +@dataclass +class RetrieveUserVisibleMetricsResponse: + """Response containing user-visible metrics""" + + metric_values: Optional[List[MetricValues]] = None + """Collection of metric values""" + + next_page_token: Optional[str] = None + """A token that can be used to get the next page of results. 
If not present, there are no more + results to show.""" + + def as_dict(self) -> dict: + """Serializes the RetrieveUserVisibleMetricsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metric_values: + body["metric_values"] = [v.as_dict() for v in self.metric_values] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RetrieveUserVisibleMetricsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metric_values: + body["metric_values"] = self.metric_values + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RetrieveUserVisibleMetricsResponse: + """Deserializes the RetrieveUserVisibleMetricsResponse from a dictionary.""" + return cls( + metric_values=_repeated_dict(d, "metric_values", MetricValues), + next_page_token=d.get("next_page_token", None), + ) + + @dataclass class ScanVectorIndexResponse: """Response to a scan vector index request.""" @@ -1519,6 +1711,53 @@ def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[Endpoi return query["page_token"] = json["next_page_token"] + def retrieve_user_visible_metrics( + self, + name: str, + *, + end_time: Optional[str] = None, + granularity_in_seconds: Optional[int] = None, + metrics: Optional[List[Metric]] = None, + page_token: Optional[str] = None, + start_time: Optional[str] = None, + ) -> RetrieveUserVisibleMetricsResponse: + """Retrieve user-visible metrics for an endpoint + + :param name: str + Vector search endpoint name + :param end_time: str (optional) + End time for metrics query + :param granularity_in_seconds: int (optional) + Granularity in seconds + :param metrics: List[:class:`Metric`] (optional) + List of metrics to retrieve + :param page_token: str (optional) + Token for pagination + :param start_time: str (optional) + Start time for metrics query + + :returns: :class:`RetrieveUserVisibleMetricsResponse` + """ + + body = {} + if end_time is not None: + body["end_time"] = end_time + if granularity_in_seconds is not None: + body["granularity_in_seconds"] = granularity_in_seconds + if metrics is not None: + body["metrics"] = [v.as_dict() for v in metrics] + if page_token is not None: + body["page_token"] = page_token + if start_time is not None: + body["start_time"] = start_time + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/vector-search/endpoints/{name}/metrics", body=body, headers=headers) + return RetrieveUserVisibleMetricsResponse.from_dict(res) + def update_endpoint_budget_policy( self, endpoint_name: str, budget_policy_id: str ) -> PatchEndpointBudgetPolicyResponse: diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 7ef18fddb..7eda4c2d7 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -2658,7 +2658,9 @@ def get_permission_levels( """Gets the permission levels that a user can have on an object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. 
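A usage sketch of the new endpoint-metrics call defined above. Timestamps are assumed to be RFC 3339 strings and the metric names are placeholders (the diff does not enumerate them); pagination is driven manually via `page_token`:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.vectorsearch import Metric

w = WorkspaceClient()

page_token = None
while True:
    resp = w.vector_search_endpoints.retrieve_user_visible_metrics(
        name="my-vector-search-endpoint",
        metrics=[Metric(name="qps"), Metric(name="latency", percentile=0.99)],  # assumed metric names
        start_time="2025-01-01T00:00:00Z",  # assumed RFC 3339 timestamps
        end_time="2025-01-02T00:00:00Z",
        granularity_in_seconds=300,
        page_token=page_token,
    )
    for series in resp.metric_values or []:
        label = series.metric.name if series.metric else "unknown"
        for point in series.values or []:
            print(label, point.timestamp, point.value)
    page_token = resp.next_page_token
    if not page_token:
        break
```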
Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. @@ -2681,7 +2683,9 @@ def get_permissions(self, workspace_object_type: str, workspace_object_id: str) parent objects or root object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. @@ -2840,7 +2844,9 @@ def set_permissions( object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) @@ -2872,7 +2878,9 @@ def update_permissions( parent objects or root object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) diff --git a/docs/account/iam/groups_v2.rst b/docs/account/iam/groups_v2.rst index 9a38fb63d..622277161 100644 --- a/docs/account/iam/groups_v2.rst +++ b/docs/account/iam/groups_v2.rst @@ -52,8 +52,9 @@ .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[AccountGroup] Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this endpoint - will not return members. Instead, members should be retrieved by iterating through `Get group - details`. + will no longer return members. Instead, members should be retrieved by iterating through `Get group + details`. Existing accounts that rely on this attribute will not be impacted and will continue + receiving member data as before. :param attributes: str (optional) Comma-separated list of attributes to return in response. 
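The expanded docstrings above enumerate the accepted `workspace_object_type` values. A short sketch reading the available permission levels and the current ACL for a notebook, assuming the standard permissions response shapes; the object ID is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# "notebooks" is one of the documented object types (alerts, dashboards, directories, genie, ...).
levels = w.workspace.get_permission_levels(
    workspace_object_type="notebooks",
    workspace_object_id="<notebook-object-id>",
)
print([pl.permission_level for pl in levels.permission_levels or []])

acl = w.workspace.get_permissions(
    workspace_object_type="notebooks",
    workspace_object_id="<notebook-object-id>",
)
print(acl.access_control_list)
```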
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 2a8043172..fa9c2ee3e 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -74,9 +74,9 @@ spn_id = spn.id - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - a.workspace_assignment.update( + _ = a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index b71c1707e..d63648d58 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,15 +24,15 @@ a = AccountClient() - creds = a.credentials.create( + role = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=creds.credentials_id) + a.credentials.delete(credentials_id=role.credentials_id) Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks diff --git a/docs/account/settingsv2/settings_v2.rst b/docs/account/settingsv2/settings_v2.rst index 03224db02..da0206e2e 100644 --- a/docs/account/settingsv2/settings_v2.rst +++ b/docs/account/settingsv2/settings_v2.rst @@ -38,7 +38,9 @@ .. py:method:: patch_public_account_setting(name: str, setting: Setting) -> Setting Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for list of - setting available via public APIs at account level. + setting available via public APIs at account level. To determine the correct field to include in a + patch request, refer to the type field of the setting returned in the + :method:settingsv2/listaccountsettingsmetadata response. :param name: str :param setting: :class:`Setting` diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index 320c875e1..2bc765a2a 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -329,9 +329,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: LARGE :value: "LARGE" - .. py:attribute:: LIQUID - :value: "LIQUID" - .. py:attribute:: MEDIUM :value: "MEDIUM" diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 44209d4b9..64c296f7c 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -279,7 +279,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConnectionType - Next Id: 47 + Next Id: 48 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -305,9 +305,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ORACLE :value: "ORACLE" - .. py:attribute:: PALANTIR - :value: "PALANTIR" - .. py:attribute:: POSTGRESQL :value: "POSTGRESQL" @@ -1500,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SecurableKind - Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271 + Latest kind: CONNECTION_SLACK_OAUTH_U2M_MAPPING = 272; Next id:273 .. 
py:attribute:: TABLE_DB_STORAGE :value: "TABLE_DB_STORAGE" @@ -1604,9 +1601,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE_FOREIGN_ORACLE :value: "TABLE_FOREIGN_ORACLE" - .. py:attribute:: TABLE_FOREIGN_PALANTIR - :value: "TABLE_FOREIGN_PALANTIR" - .. py:attribute:: TABLE_FOREIGN_POSTGRESQL :value: "TABLE_FOREIGN_POSTGRESQL" diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index c6064252a..7ded58e8f 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -1216,9 +1216,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DOCKER_INVALID_OS_EXCEPTION :value: "DOCKER_INVALID_OS_EXCEPTION" - .. py:attribute:: DRIVER_DNS_RESOLUTION_FAILURE - :value: "DRIVER_DNS_RESOLUTION_FAILURE" - .. py:attribute:: DRIVER_EVICTION :value: "DRIVER_EVICTION" @@ -1441,12 +1438,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NFS_MOUNT_FAILURE :value: "NFS_MOUNT_FAILURE" - .. py:attribute:: NO_ACTIVATED_K8S - :value: "NO_ACTIVATED_K8S" - - .. py:attribute:: NO_ACTIVATED_K8S_TESTING_TAG - :value: "NO_ACTIVATED_K8S_TESTING_TAG" - .. py:attribute:: NO_MATCHED_K8S :value: "NO_MATCHED_K8S" @@ -1483,9 +1474,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SECRET_RESOLUTION_ERROR :value: "SECRET_RESOLUTION_ERROR" - .. py:attribute:: SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION - :value: "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" - .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index df004c847..af9a747b3 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -214,9 +214,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION :value: "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" - .. py:attribute:: INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION - :value: "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" - .. py:attribute:: INVALID_CHAT_COMPLETION_JSON_EXCEPTION :value: "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" @@ -385,6 +382,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TextAttachmentPurpose + + Purpose/intent of a text attachment + + .. py:attribute:: FOLLOW_UP_QUESTION + :value: "FOLLOW_UP_QUESTION" + .. autoclass:: TrashDashboardResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 844e66245..3b514c298 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -604,6 +604,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: OnlineStoreConfig + :members: + :undoc-members: + .. py:class:: OnlineStoreState .. py:attribute:: AVAILABLE diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index cc2f14411..cd6a437f8 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -4,6 +4,10 @@ Delta Live Tables These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.pipelines`` module. .. 
py:currentmodule:: databricks.sdk.service.pipelines +.. autoclass:: ConnectionParameters + :members: + :undoc-members: + .. autoclass:: CreatePipelineResponse :members: :undoc-members: @@ -139,9 +143,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: BIGQUERY :value: "BIGQUERY" - .. py:attribute:: CONFLUENCE - :value: "CONFLUENCE" - .. py:attribute:: DYNAMICS365 :value: "DYNAMICS365" @@ -151,21 +152,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: GA4_RAW_DATA :value: "GA4_RAW_DATA" - .. py:attribute:: GOOGLE_ADS - :value: "GOOGLE_ADS" - - .. py:attribute:: GUIDEWIRE - :value: "GUIDEWIRE" - - .. py:attribute:: HUBSPOT - :value: "HUBSPOT" - .. py:attribute:: MANAGED_POSTGRESQL :value: "MANAGED_POSTGRESQL" - .. py:attribute:: META_MARKETING - :value: "META_MARKETING" - .. py:attribute:: MYSQL :value: "MYSQL" @@ -178,42 +167,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: POSTGRESQL :value: "POSTGRESQL" - .. py:attribute:: REDSHIFT - :value: "REDSHIFT" - .. py:attribute:: SALESFORCE :value: "SALESFORCE" - .. py:attribute:: SALESFORCE_MARKETING_CLOUD - :value: "SALESFORCE_MARKETING_CLOUD" - .. py:attribute:: SERVICENOW :value: "SERVICENOW" .. py:attribute:: SHAREPOINT :value: "SHAREPOINT" - .. py:attribute:: SQLDW - :value: "SQLDW" - .. py:attribute:: SQLSERVER :value: "SQLSERVER" .. py:attribute:: TERADATA :value: "TERADATA" - .. py:attribute:: TIKTOK_ADS - :value: "TIKTOK_ADS" - - .. py:attribute:: WORKDAY_HCM - :value: "WORKDAY_HCM" - .. py:attribute:: WORKDAY_RAAS :value: "WORKDAY_RAAS" - .. py:attribute:: ZENDESK - :value: "ZENDESK" - .. autoclass:: ListPipelineEventsResponse :members: :undoc-members: @@ -398,6 +369,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RewindDatasetSpec + :members: + :undoc-members: + +.. autoclass:: RewindSpec + :members: + :undoc-members: + .. autoclass:: RunAs :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index b47a84770..a1687d876 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -208,6 +208,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + .. py:attribute:: CREATE_FAILED + :value: "CREATE_FAILED" + + .. py:attribute:: CREATING + :value: "CREATING" + .. py:attribute:: DISCONNECTED :value: "DISCONNECTED" @@ -601,6 +607,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: NccAzurePrivateEndpointRuleConnectionState + .. py:attribute:: CREATE_FAILED + :value: "CREATE_FAILED" + + .. py:attribute:: CREATING + :value: "CREATING" + .. py:attribute:: DISCONNECTED :value: "DISCONNECTED" @@ -641,12 +653,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: NccPrivateEndpointRulePrivateLinkConnectionState - .. py:attribute:: CREATE_FAILED - :value: "CREATE_FAILED" - - .. py:attribute:: CREATING - :value: "CREATING" - .. 
py:attribute:: DISCONNECTED :value: "DISCONNECTED" diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 865aba6c5..66f6340c9 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -1296,9 +1296,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DOCKER_INVALID_OS_EXCEPTION :value: "DOCKER_INVALID_OS_EXCEPTION" - .. py:attribute:: DRIVER_DNS_RESOLUTION_FAILURE - :value: "DRIVER_DNS_RESOLUTION_FAILURE" - .. py:attribute:: DRIVER_EVICTION :value: "DRIVER_EVICTION" @@ -1521,12 +1518,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NFS_MOUNT_FAILURE :value: "NFS_MOUNT_FAILURE" - .. py:attribute:: NO_ACTIVATED_K8S - :value: "NO_ACTIVATED_K8S" - - .. py:attribute:: NO_ACTIVATED_K8S_TESTING_TAG - :value: "NO_ACTIVATED_K8S_TESTING_TAG" - .. py:attribute:: NO_MATCHED_K8S :value: "NO_MATCHED_K8S" @@ -1563,9 +1554,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SECRET_RESOLUTION_ERROR :value: "SECRET_RESOLUTION_ERROR" - .. py:attribute:: SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION - :value: "SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION" - .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index b8bd46536..33e37bdd8 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -109,6 +109,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Metric + :members: + :undoc-members: + +.. autoclass:: MetricLabel + :members: + :undoc-members: + +.. autoclass:: MetricValue + :members: + :undoc-members: + +.. autoclass:: MetricValues + :members: + :undoc-members: + .. autoclass:: MiniVectorIndex :members: :undoc-members: @@ -147,6 +163,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RetrieveUserVisibleMetricsResponse + :members: + :undoc-members: + .. autoclass:: ScanVectorIndexResponse :members: :undoc-members: diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 612800956..934dc3e6b 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,20 +30,22 @@ w = WorkspaceClient() - credential = w.storage_credentials.create( + storage_credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + comment="created via SDK", ) - created = w.external_locations.create( + external_location = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + credential_name=storage_credential.name, + comment="created via SDK", + url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", ) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) + w.storage_credentials.delete(name=storage_credential.name) + w.external_locations.delete(name=external_location.name) Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -105,20 +107,20 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) - _ = w.external_locations.get(name=created.name) + _ = w.external_locations.get(get=created.name) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) + w.storage_credentials.delete(delete=credential.name) + w.external_locations.delete(delete=created.name) Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. @@ -140,10 +142,11 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.external_locations.list() + all = w.external_locations.list(catalog.ListExternalLocationsRequest()) Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on @@ -190,24 +193,24 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) _ = w.external_locations.update( name=created.name, credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) + w.storage_credentials.delete(delete=credential.name) + w.external_locations.delete(delete=created.name) Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external diff --git a/docs/workspace/catalog/rfa.rst b/docs/workspace/catalog/rfa.rst index 3019403bb..e5e05073e 100644 --- a/docs/workspace/catalog/rfa.rst +++ b/docs/workspace/catalog/rfa.rst @@ -4,12 +4,10 @@ .. py:class:: RfaAPI - Request for Access enables customers to request access to and manage access request destinations for Unity - Catalog securables. + Request for Access enables users to request access for Unity Catalog securables. - These APIs provide a standardized way to update, get, and request to access request destinations. - Fine-grained authorization ensures that only users with appropriate permissions can manage access request - destinations. 
+ These APIs provide a standardized way for securable owners (or users with MANAGE privileges) to manage + access request destinations. .. py:method:: batch_create_access_requests( [, requests: Optional[List[CreateAccessRequest]]]) -> BatchCreateAccessRequestsResponse diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index d8111141e..c16a8e093 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,13 +30,14 @@ w = WorkspaceClient() - credential = w.storage_credentials.create( + storage_credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + comment="created via SDK", ) # cleanup - w.storage_credentials.delete(name=credential.name) + w.storage_credentials.delete(name=storage_credential.name) Creates a new storage credential. @@ -123,11 +124,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) + all = w.storage_credentials.list() Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore @@ -173,17 +173,17 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) _ = w.storage_credentials.update( name=created.name, comment=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=created.name) Updates a storage credential on the metastore. diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index b33bef940..8de553fc2 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -156,7 +156,7 @@ created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name) - all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) + summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name) # cleanup w.schemas.delete(full_name=created_schema.full_name) diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index 3ceb286ef..258ffd728 100644 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -29,6 +29,24 @@ .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage + .. py:method:: create_space(warehouse_id: str, serialized_space: str [, description: Optional[str], parent_path: Optional[str], title: Optional[str]]) -> GenieSpace + + Creates a Genie space from a serialized payload. 
+ + :param warehouse_id: str + Warehouse to associate with the new space + :param serialized_space: str + Serialized export model for the space contents + :param description: str (optional) + Optional description + :param parent_path: str (optional) + Parent folder path where the space will be registered + :param title: str (optional) + Optional title override + + :returns: :class:`GenieSpace` + + .. py:method:: delete_conversation(space_id: str, conversation_id: str) Delete a conversation. @@ -150,12 +168,15 @@ :returns: :class:`GenieGetMessageQueryResultResponse` - .. py:method:: get_space(space_id: str) -> GenieSpace + .. py:method:: get_space(space_id: str [, include_serialized_space: Optional[bool]]) -> GenieSpace Get details of a Genie Space. :param space_id: str The ID associated with the Genie space + :param include_serialized_space: bool (optional) + Whether to include the serialized space export in the response. Requires at least CAN EDIT + permission on the space. :returns: :class:`GenieSpace` @@ -248,4 +269,22 @@ + .. py:method:: update_space(space_id: str [, description: Optional[str], serialized_space: Optional[str], title: Optional[str], warehouse_id: Optional[str]]) -> GenieSpace + + Updates a Genie space with a serialized payload. + + :param space_id: str + Genie space ID + :param description: str (optional) + Optional description + :param serialized_space: str (optional) + Serialized export model for the space contents (full replacement) + :param title: str (optional) + Optional title override + :param warehouse_id: str (optional) + Optional warehouse override + + :returns: :class:`GenieSpace` + + .. py:method:: wait_get_message_genie_completed(conversation_id: str, message_id: str, space_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GenieMessage], None]]) -> GenieMessage diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index ea24afd1a..15524c53e 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -44,7 +44,7 @@ obj = w.workspace.get_status(path=notebook_path) - _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) + levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 0b82986de..c22e872d0 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -10,7 +10,7 @@ scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use - scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or + scheduling system. You can implement job tasks using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java applications. You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in @@ -188,9 +188,10 @@ as when this job is deleted. :param environments: List[:class:`JobEnvironment`] (optional) A list of task execution environment specifications that can be referenced by serverless tasks of - this job. 
An environment is required to be present for serverless tasks. For serverless notebook - tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, - the task environment is required to be specified using environment_key in the task settings. + this job. For serverless notebook tasks, if the environment_key is not specified, the notebook + environment will be used if present. If a jobs environment is specified, it will override the + notebook environment. For other serverless tasks, the task environment is required to be specified + using environment_key in the task settings. :param format: :class:`Format` (optional) Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 98d803a63..55c7f0898 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -120,7 +120,7 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a model version. diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index b0bada615..e147d5491 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -129,8 +129,8 @@ .. py:method:: delete(pipeline_id: str) - Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and - its tables. You cannot undo this action. + Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade to all + pipeline tables. Please reach out to Databricks support for assistance to undo this action. :param pipeline_id: str @@ -344,7 +344,7 @@ :returns: :class:`PipelinePermissions` - .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse + .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], rewind_spec: Optional[RewindSpec], validate_only: Optional[bool]]) -> StartUpdateResponse Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. @@ -361,6 +361,8 @@ A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means that the states of the table will be reset before the refresh. + :param rewind_spec: :class:`RewindSpec` (optional) + The information about the requested rewind operation. If specified this is a rewind mode update. :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. 
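Editor's note (illustrative only): tying together the Genie space methods added in docs/workspace/dashboards/genie.rst above, a minimal sketch — the warehouse ID and serialized export are placeholders, and it assumes the returned GenieSpace exposes a space_id attribute:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Placeholder warehouse ID and serialized space export payload.
    space = w.genie.create_space(
        warehouse_id="<warehouse-id>",
        serialized_space="<serialized-space-export>",
        title="sdk-created space",
    )

    # Read it back with the serialized export included
    # (requires at least CAN EDIT permission on the space).
    fetched = w.genie.get_space(space_id=space.space_id, include_serialized_space=True)

    # Update the title; serialized_space, if provided, fully replaces the space contents.
    updated = w.genie.update_space(space_id=space.space_id, title="sdk-updated space")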
diff --git a/docs/workspace/settingsv2/workspace_settings_v2.rst b/docs/workspace/settingsv2/workspace_settings_v2.rst index 2d6d379df..10534c2fa 100644 --- a/docs/workspace/settingsv2/workspace_settings_v2.rst +++ b/docs/workspace/settingsv2/workspace_settings_v2.rst @@ -12,6 +12,7 @@ of setting available via public APIs. :param name: str + Name of the setting :returns: :class:`Setting` @@ -39,9 +40,12 @@ .. py:method:: patch_public_workspace_setting(name: str, setting: Setting) -> Setting Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata for - list of setting available via public APIs at workspace level. + list of setting available via public APIs at workspace level. To determine the correct field to + include in a patch request, refer to the type field of the setting returned in the + :method:settingsv2/listworkspacesettingsmetadata response. :param name: str + Name of the setting :param setting: :class:`Setting` :returns: :class:`Setting` diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index 0dfb63fbf..f0081b3f2 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -29,7 +29,7 @@ display_name=f"sdk-{time.time_ns()}", warehouse_id=srcs[0].warehouse_id, description="test query from Go SDK", - query_text="SELECT 1", + query_text="SHOW TABLES", ) ) diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst index 47a8fa59a..53c0bdd7a 100644 --- a/docs/workspace/vectorsearch/vector_search_endpoints.rst +++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst @@ -55,6 +55,26 @@ :returns: Iterator over :class:`EndpointInfo` + .. py:method:: retrieve_user_visible_metrics(name: str [, end_time: Optional[str], granularity_in_seconds: Optional[int], metrics: Optional[List[Metric]], page_token: Optional[str], start_time: Optional[str]]) -> RetrieveUserVisibleMetricsResponse + + Retrieve user-visible metrics for an endpoint + + :param name: str + Vector search endpoint name + :param end_time: str (optional) + End time for metrics query + :param granularity_in_seconds: int (optional) + Granularity in seconds + :param metrics: List[:class:`Metric`] (optional) + List of metrics to retrieve + :param page_token: str (optional) + Token for pagination + :param start_time: str (optional) + Start time for metrics query + + :returns: :class:`RetrieveUserVisibleMetricsResponse` + + .. py:method:: update_endpoint_budget_policy(endpoint_name: str, budget_policy_id: str) -> PatchEndpointBudgetPolicyResponse Update the budget policy of an endpoint diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index e1b7d12b9..eea7852cd 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -79,7 +79,7 @@ notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) + export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook) Exports an object or the contents of an entire directory. @@ -111,7 +111,9 @@ Gets the permission levels that a user can have on an object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. 
Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. @@ -124,7 +126,9 @@ parent objects or root object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. @@ -172,14 +176,14 @@ w = WorkspaceClient() - notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" w.workspace.import_( - content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), + path=notebook, format=workspace.ImportFormat.SOURCE, - language=workspace.Language.SQL, - overwrite=true_, - path=notebook_path, + language=workspace.Language.PYTHON, + content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(), + overwrite=True, ) Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. @@ -223,14 +227,16 @@ .. code-block:: + import os + import time + from databricks.sdk import WorkspaceClient w = WorkspaceClient() - names = [] - for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True): - names.append(i.path) - assert len(names) > 0 + notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + + objects = w.workspace.list(path=os.path.dirname(notebook)) List workspace objects @@ -263,7 +269,9 @@ object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) @@ -277,7 +285,9 @@ parent objects or root object. :param workspace_object_type: str - The workspace object type for which to get or manage permissions. + The workspace object type for which to get or manage permissions. Could be one of the following: + alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files, genie, notebooks, + queries :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) diff --git a/tests/databricks/sdk/service/lrotesting.py b/tests/databricks/sdk/service/lrotesting.py index 679118220..c06822e8f 100755 --- a/tests/databricks/sdk/service/lrotesting.py +++ b/tests/databricks/sdk/service/lrotesting.py @@ -4,7 +4,6 @@ import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum from typing import Any, Dict, List, Optional @@ -20,11 +19,7 @@ @dataclass class DatabricksServiceExceptionWithDetailsProto: - """Serialization format for DatabricksServiceException with error details. 
This message doesn't - work for ScalaPB-04 as google.protobuf.Any is only available to ScalaPB-09. Note the definition - of this message should be in sync with DatabricksServiceExceptionProto defined in - /api-base/proto/legacy/databricks.proto except the later one doesn't have the error details - field defined.""" + """Databricks Error that is returned by all Databricks APIs.""" details: Optional[List[dict]] = None """@pbjson-skip""" @@ -174,24 +169,15 @@ class Operation: metadata: Optional[dict] = None """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such - metadata. Any method that returns a long-running operation should document the metadata type, if - any.""" + metadata.""" name: Optional[str] = None """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with - `operations/{unique_id}`. - - Note: multi-segment resource names are not yet supported in the RPC framework and SDK/TF. Until - that support is added, `name` must be string without internal `/` separators.""" + `operations/{unique_id}`.""" response: Optional[dict] = None - """The normal, successful response of the operation. If the original method returns no data on - success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is - standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the - response should have the type `XxxResponse`, where `Xxx` is the original method name. For - example, if the original method name is `TakeSnapshot()`, the inferred response type is - `TakeSnapshotResponse`.""" + """The normal, successful response of the operation.""" def as_dict(self) -> dict: """Serializes the Operation into a dictionary suitable for use as a JSON request body.""" @@ -380,13 +366,13 @@ def __init__(self, impl: LroTestingAPI, operation: Operation): self._operation = operation def wait(self, opts: Optional[lro.LroOptions] = None) -> TestResource: - """Wait blocks until the long-running operation is completed with default 20 min - timeout. If the operation didn't finish within the timeout, this function will - raise an error of type TimeoutError, otherwise returns successful response and - any errors encountered. + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. :param opts: :class:`LroOptions` - Timeout options (default: 20 minutes) + Timeout options (default: polls indefinitely) :returns: :class:`TestResource` """ @@ -414,7 +400,7 @@ def poll_operation(): return test_resource, None - return poll(poll_operation, timeout=opts.timeout if opts is not None else timedelta(minutes=20)) + return poll(poll_operation, timeout=opts.timeout if opts is not None else None) def cancel(self): """Starts asynchronous cancellation on a long-running operation. The server @@ -463,13 +449,13 @@ def __init__(self, impl: LroTestingAPI, operation: Operation): self._operation = operation def wait(self, opts: Optional[lro.LroOptions] = None): - """Wait blocks until the long-running operation is completed with default 20 min - timeout. 
If the operation didn't finish within the timeout, this function will - raise an error of type TimeoutError, otherwise returns successful response and - any errors encountered. + """Wait blocks until the long-running operation is completed. If no timeout is + specified, this will poll indefinitely. If a timeout is provided and the operation + didn't finish within the timeout, this function will raise an error of type + TimeoutError, otherwise returns successful response and any errors encountered. :param opts: :class:`LroOptions` - Timeout options (default: 20 minutes) + Timeout options (default: polls indefinitely) :returns: :class:`Any /* MISSING TYPE */` """ @@ -495,7 +481,7 @@ def poll_operation(): return {}, None - poll(poll_operation, timeout=opts.timeout if opts is not None else timedelta(minutes=20)) + poll(poll_operation, timeout=opts.timeout if opts is not None else None) def cancel(self): """Starts asynchronous cancellation on a long-running operation. The server diff --git a/tests/generated/test_json_marshall.py b/tests/generated/test_json_marshall.py index 16fc6fb26..bf5460f2e 100755 --- a/tests/generated/test_json_marshall.py +++ b/tests/generated/test_json_marshall.py @@ -190,7 +190,7 @@ def _fieldmask(d: str) -> FieldMask: required_string="non_default_string", required_struct={}, required_timestamp=_timestamp("2023-12-31T23:59:59Z"), - required_value=json.loads("{}"), + required_value=json.loads('{"key": "value"}'), test_required_enum=TestEnum.TEST_ENUM_TWO, ), """{ @@ -198,6 +198,7 @@ def _fieldmask(d: str) -> FieldMask: "required_int32": 42, "required_int64": 1234567890123456789, "required_bool": true, + "required_value": {"key": "value"}, "required_message": {}, "test_required_enum": "TEST_ENUM_TWO", "required_duration": "7200s",