From 7a591a5a4354e5fa10964c0ea3d226c00689825b Mon Sep 17 00:00:00 2001 From: Divyansh Vijayvergia Date: Tue, 14 Oct 2025 15:47:56 +0000 Subject: [PATCH 1/2] update sdk to latest spec --- .gitattributes | 1 - databricks/sdk/service/billing.py | 3 +- databricks/sdk/service/cleanrooms.py | 3 +- databricks/sdk/service/dashboards.py | 3 +- databricks/sdk/service/jobs.py | 3 +- databricks/sdk/service/pipelines.py | 3 +- databricks/sdk/service/sharing.py | 3 +- docs/workspace/catalog/catalogs.rst | 7 +- docs/workspace/catalog/external_locations.rst | 34 +++-- .../workspace/catalog/storage_credentials.rst | 10 +- docs/workspace/compute/clusters.rst | 3 +- docs/workspace/jobs/jobs.rst | 44 ++---- docs/workspace/ml/model_registry.rst | 11 +- docs/workspace/sql/queries.rst | 2 +- docs/workspace/workspace/workspace.rst | 17 +-- tests/databricks/sdk/service/lrotesting.py | 125 +----------------- tests/generated/test_http_call.py | 4 +- 17 files changed, 66 insertions(+), 210 deletions(-) diff --git a/.gitattributes b/.gitattributes index 8bee41bad..cf7e87876 100755 --- a/.gitattributes +++ b/.gitattributes @@ -34,4 +34,3 @@ databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true test_http_call.py linguist-generated=true test_json_marshall.py linguist-generated=true -test_lro_call.py linguist-generated=true diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 3758028c2..46562a5a8 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -7,12 +7,13 @@ from enum import Enum from typing import Any, BinaryIO, Dict, Iterator, List, Optional -from databricks.sdk.service import compute from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import compute + # all definitions in this file are in alphabetical order diff --git 
a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 299d623e3..fb4cc75fa 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -10,13 +10,14 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from databricks.sdk.service import catalog, jobs, settings, sharing from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import catalog, jobs, settings, sharing + # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 69b544f7d..e53026431 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -10,7 +10,6 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from databricks.sdk.service import sql from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) @@ -19,6 +18,8 @@ _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import sql + # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 8aa530264..8549f2d4c 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -10,7 +10,6 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from databricks.sdk.service import compute from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) @@ -19,6 +18,8 @@ _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import compute + # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 6ea0874e5..def1d874f 100755 --- a/databricks/sdk/service/pipelines.py +++ 
b/databricks/sdk/service/pipelines.py @@ -10,7 +10,6 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional -from databricks.sdk.service import compute from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict, _repeated_enum) @@ -19,6 +18,8 @@ _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import compute + # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 172307d67..6c6c35e19 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -7,13 +7,14 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional -from databricks.sdk.service import catalog from databricks.sdk.service._internal import (_enum, _from_dict, _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") +from databricks.sdk.service import catalog + # all definitions in this file are in alphabetical order diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 47cff9e95..5349b1d12 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -24,10 +24,10 @@ w = WorkspaceClient() - created = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created.name, force=True) + w.catalogs.delete(name=created_catalog.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. 
@@ -156,12 +156,13 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() created = w.catalogs.create(name=f"sdk-{time.time_ns()}") - _ = w.catalogs.update(name=created.name, comment="updated") + _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED) # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index b8b70227f..0a03cb17f 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,22 +30,20 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) - external_location = w.external_locations.create( + created = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=storage_credential.name, - comment="created via SDK", - url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", + credential_name=credential.name, + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) - w.external_locations.delete(name=external_location.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -107,20 +105,20 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) - _ = w.external_locations.get(get=created.name) + _ = w.external_locations.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. 
@@ -194,24 +192,24 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) _ = w.external_locations.update( name=created.name, credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) + w.storage_credentials.delete(delete=credential.name) + w.external_locations.delete(delete=created.name) Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 48666f7ab..fe21a00d2 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -32,11 +32,11 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Creates a new storage credential. 
@@ -98,13 +98,13 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) - by_name = w.storage_credentials.get(name=created.name) + by_name = w.storage_credentials.get(get=created.name) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index d46b8ecd0..db78626ff 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -647,11 +647,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. 
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index e1d8f668f..bc3b141e5 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -357,21 +357,23 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - run = w.jobs.submit( - run_name=f"sdk-{time.time_ns()}", + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", tasks=[ - jobs.SubmitTask( + jobs.Task( + description="test", existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key=f"sdk-{time.time_ns()}", + task_key="test", + timeout_seconds=0, ) ], - ).result() + ) - output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) + by_id = w.jobs.get(job_id=created_job.job_id) # cleanup - w.jobs.delete_run(run_id=run.run_id) + w.jobs.delete(job_id=created_job.job_id) Get a single job. @@ -520,37 +522,11 @@ .. code-block:: - import os - import time - from databricks.sdk import WorkspaceClient - from databricks.sdk.service import jobs w = WorkspaceClient() - notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - - cluster_id = ( - w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] - ) - - created_job = w.jobs.create( - name=f"sdk-{time.time_ns()}", - tasks=[ - jobs.Task( - description="test", - existing_cluster_id=cluster_id, - notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key="test", - timeout_seconds=0, - ) - ], - ) - - run_list = w.jobs.list_runs(job_id=created_job.job_id) - - # cleanup - w.jobs.delete(job_id=created_job.job_id) + job_list = w.jobs.list(expand_tasks=False) List jobs. 
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 9a6c8f286..601ffd87d 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -91,6 +91,8 @@ w = WorkspaceClient() model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a new registered model with the name specified in the request body. Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. @@ -734,13 +736,14 @@ w = WorkspaceClient() - created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - model = w.model_registry.get_model(name=created.registered_model.name) + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") - w.model_registry.update_model( - name=model.registered_model_databricks.name, + w.model_registry.update_model_version( description=f"sdk-{time.time_ns()}", + name=created.model_version.name, + version=created.model_version.version, ) Updates a registered model. 
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index 0dfb63fbf..f0081b3f2 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -29,7 +29,7 @@ display_name=f"sdk-{time.time_ns()}", warehouse_id=srcs[0].warehouse_id, description="test query from Go SDK", - query_text="SELECT 1", + query_text="SHOW TABLES", ) ) diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index fbcb5374b..66e0546c9 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -79,7 +79,7 @@ notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) + export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook) Exports an object or the contents of an entire directory. @@ -175,18 +175,11 @@ notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" w.workspace.import_( - path=notebook_path, - overwrite=True, + content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, - language=workspace.Language.PYTHON, - content=base64.b64encode( - ( - """import time - time.sleep(10) - dbutils.notebook.exit('hello') - """ - ).encode() - ).decode(), + language=workspace.Language.SQL, + overwrite=True, + path=notebook_path, ) Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. 
diff --git a/tests/databricks/sdk/service/lrotesting.py b/tests/databricks/sdk/service/lrotesting.py index 6a4325b53..0d3c97014 100755 --- a/tests/databricks/sdk/service/lrotesting.py +++ b/tests/databricks/sdk/service/lrotesting.py @@ -4,12 +4,9 @@ import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum from typing import Any, Dict, List, Optional -from databricks.sdk.common import lro -from databricks.sdk.retries import RetryError, poll from databricks.sdk.service._internal import _enum, _from_dict _LOG = logging.getLogger("databricks.sdk") @@ -269,40 +266,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TestResource: return cls(id=d.get("id", None), name=d.get("name", None)) -@dataclass -class TestResourceOperationMetadata: - """Metadata for test resource operations""" - - progress_percent: Optional[int] = None - """Progress percentage (0-100)""" - - resource_id: Optional[str] = None - """ID of the resource being operated on""" - - def as_dict(self) -> dict: - """Serializes the TestResourceOperationMetadata into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.progress_percent is not None: - body["progress_percent"] = self.progress_percent - if self.resource_id is not None: - body["resource_id"] = self.resource_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TestResourceOperationMetadata into a shallow dictionary of its immediate attributes.""" - body = {} - if self.progress_percent is not None: - body["progress_percent"] = self.progress_percent - if self.resource_id is not None: - body["resource_id"] = self.resource_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TestResourceOperationMetadata: - """Deserializes the TestResourceOperationMetadata from a dictionary.""" - return cls(progress_percent=d.get("progress_percent", None), resource_id=d.get("resource_id", None)) - - class LroTestingAPI: """Test service for Long Running 
Operations""" @@ -317,7 +280,7 @@ def cancel_operation(self, name: str): self._api.do("POST", f"/api/2.0/lro-testing/operations/{name}/cancel", headers=headers) - def create_test_resource(self, resource: TestResource) -> CreateTestResourceOperation: + def create_test_resource(self, resource: TestResource) -> Operation: """Simple method to create test resource for LRO testing :param resource: :class:`TestResource` @@ -332,8 +295,7 @@ def create_test_resource(self, resource: TestResource) -> CreateTestResourceOper } res = self._api.do("POST", "/api/2.0/lro-testing/resources", body=body, headers=headers) - operation = Operation.from_dict(res) - return CreateTestResourceOperation(self, operation) + return Operation.from_dict(res) def get_operation(self, name: str) -> Operation: @@ -359,86 +321,3 @@ def get_test_resource(self, resource_id: str) -> TestResource: res = self._api.do("GET", f"/api/2.0/lro-testing/resources/{resource_id}", headers=headers) return TestResource.from_dict(res) - - -class CreateTestResourceOperation: - """Long-running operation for create_test_resource""" - - def __init__(self, impl: LroTestingAPI, operation: Operation): - self._impl = impl - self._operation = operation - - def wait(self, opts: Optional[lro.LroOptions] = None) -> TestResource: - """Wait blocks until the long-running operation is completed with default 20 min - timeout. If the operation didn't finish within the timeout, this function will - raise an error of type TimeoutError, otherwise returns successful response and - any errors encountered. 
- - :param opts: :class:`LroOptions` - Timeout options (default: 20 minutes) - - :returns: :class:`TestResource` - """ - - def poll_operation(): - operation = self._impl.get_operation(name=self._operation.name) - - # Update local operation state - self._operation = operation - - if not operation.done: - return None, RetryError.continues("operation still in progress") - - if operation.error: - error_msg = operation.error.message if operation.error.message else "unknown error" - if operation.error.error_code: - error_msg = f"[{operation.error.error_code}] {error_msg}" - return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) - - # Operation completed successfully, unmarshal response. - if operation.response is None: - return None, RetryError.halt(Exception("operation completed but no response available")) - - test_resource = TestResource.from_dict(operation.response) - - return test_resource, None - - return poll(poll_operation, timeout=opts.timeout if opts is not None else timedelta(minutes=20)) - - def cancel(self): - """Starts asynchronous cancellation on a long-running operation. The server - makes a best effort to cancel the operation, but success is not guaranteed. - """ - self._impl.cancel_operation(name=self._operation.name) - - def name(self) -> str: - """Name returns the name of the long-running operation. The name is assigned - by the server and is unique within the service from which the operation is created. - - :returns: str - """ - return self._operation.name - - def metadata(self) -> TestResourceOperationMetadata: - """Metadata returns metadata associated with the long-running operation. - If the metadata is not available, the returned metadata is None. 
- - :returns: :class:`TestResourceOperationMetadata` or None - """ - if self._operation.metadata is None: - return None - - return TestResourceOperationMetadata.from_dict(self._operation.metadata) - - def done(self) -> bool: - """Done reports whether the long-running operation has completed. - - :returns: bool - """ - # Refresh the operation state first - operation = self._impl.get_operation(name=self._operation.name) - - # Update local operation state - self._operation = operation - - return operation.done diff --git a/tests/generated/test_http_call.py b/tests/generated/test_http_call.py index eecb6cf9c..e31805e29 100755 --- a/tests/generated/test_http_call.py +++ b/tests/generated/test_http_call.py @@ -1,5 +1,7 @@ # Code generated by Databricks SDK Generator. DO NOT EDIT. +import json + import pytest from google.protobuf.duration_pb2 import Duration from google.protobuf.timestamp_pb2 import Timestamp @@ -53,7 +55,7 @@ def _fieldmask(d: str) -> FieldMask: lambda requests_mock: requests_mock.patch("http://localhost/api/2.0/http-call/update_string/789/true"), lambda client: client.update_resource( resource=Resource( - any_field={"key": "value"}, + any_field=json.loads('{"key": "value"}'), nested_path_param_bool=True, nested_path_param_int=789, nested_path_param_string="update_string", From a8f0566d01565542f70a3df06ce57aea22db842c Mon Sep 17 00:00:00 2001 From: Divyansh Vijayvergia Date: Tue, 14 Oct 2025 15:53:16 +0000 Subject: [PATCH 2/2] updated with latest genkit --- .gitattributes | 1 + databricks/sdk/service/billing.py | 3 +- databricks/sdk/service/cleanrooms.py | 3 +- databricks/sdk/service/dashboards.py | 3 +- databricks/sdk/service/jobs.py | 3 +- databricks/sdk/service/pipelines.py | 3 +- databricks/sdk/service/sharing.py | 3 +- tests/databricks/sdk/service/lrotesting.py | 125 ++++++++++++++++++++- tests/generated/test_http_call.py | 4 +- 9 files changed, 131 insertions(+), 17 deletions(-) diff --git a/.gitattributes b/.gitattributes index 
cf7e87876..8bee41bad 100755 --- a/.gitattributes +++ b/.gitattributes @@ -34,3 +34,4 @@ databricks/sdk/service/vectorsearch.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true test_http_call.py linguist-generated=true test_json_marshall.py linguist-generated=true +test_lro_call.py linguist-generated=true diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 46562a5a8..3758028c2 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -7,13 +7,12 @@ from enum import Enum from typing import Any, BinaryIO, Dict, Iterator, List, Optional +from databricks.sdk.service import compute from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import compute - # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index fb4cc75fa..299d623e3 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -10,14 +10,13 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service import catalog, jobs, settings, sharing from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import catalog, jobs, settings, sharing - # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index e53026431..69b544f7d 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -10,6 +10,7 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service import sql from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) @@ 
-18,8 +19,6 @@ _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import sql - # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 8549f2d4c..8aa530264 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -10,6 +10,7 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service import compute from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict) @@ -18,8 +19,6 @@ _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import compute - # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index def1d874f..6ea0874e5 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -10,6 +10,7 @@ from enum import Enum from typing import Any, Callable, Dict, Iterator, List, Optional +from databricks.sdk.service import compute from databricks.sdk.service._internal import (Wait, _enum, _from_dict, _repeated_dict, _repeated_enum) @@ -18,8 +19,6 @@ _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import compute - # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 6c6c35e19..172307d67 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -7,14 +7,13 @@ from enum import Enum from typing import Any, Dict, Iterator, List, Optional +from databricks.sdk.service import catalog from databricks.sdk.service._internal import (_enum, _from_dict, _repeated_dict, _repeated_enum) _LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import catalog - # all definitions in this file are in alphabetical order diff --git a/tests/databricks/sdk/service/lrotesting.py 
b/tests/databricks/sdk/service/lrotesting.py index 0d3c97014..6a4325b53 100755 --- a/tests/databricks/sdk/service/lrotesting.py +++ b/tests/databricks/sdk/service/lrotesting.py @@ -4,9 +4,12 @@ import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum from typing import Any, Dict, List, Optional +from databricks.sdk.common import lro +from databricks.sdk.retries import RetryError, poll from databricks.sdk.service._internal import _enum, _from_dict _LOG = logging.getLogger("databricks.sdk") @@ -266,6 +269,40 @@ def from_dict(cls, d: Dict[str, Any]) -> TestResource: return cls(id=d.get("id", None), name=d.get("name", None)) +@dataclass +class TestResourceOperationMetadata: + """Metadata for test resource operations""" + + progress_percent: Optional[int] = None + """Progress percentage (0-100)""" + + resource_id: Optional[str] = None + """ID of the resource being operated on""" + + def as_dict(self) -> dict: + """Serializes the TestResourceOperationMetadata into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.progress_percent is not None: + body["progress_percent"] = self.progress_percent + if self.resource_id is not None: + body["resource_id"] = self.resource_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TestResourceOperationMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.progress_percent is not None: + body["progress_percent"] = self.progress_percent + if self.resource_id is not None: + body["resource_id"] = self.resource_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TestResourceOperationMetadata: + """Deserializes the TestResourceOperationMetadata from a dictionary.""" + return cls(progress_percent=d.get("progress_percent", None), resource_id=d.get("resource_id", None)) + + class LroTestingAPI: """Test service for Long Running Operations""" @@ -280,7 +317,7 @@ def cancel_operation(self, 
name: str): self._api.do("POST", f"/api/2.0/lro-testing/operations/{name}/cancel", headers=headers) - def create_test_resource(self, resource: TestResource) -> Operation: + def create_test_resource(self, resource: TestResource) -> CreateTestResourceOperation: """Simple method to create test resource for LRO testing :param resource: :class:`TestResource` @@ -295,7 +332,8 @@ def create_test_resource(self, resource: TestResource) -> Operation: } res = self._api.do("POST", "/api/2.0/lro-testing/resources", body=body, headers=headers) - return Operation.from_dict(res) + operation = Operation.from_dict(res) + return CreateTestResourceOperation(self, operation) def get_operation(self, name: str) -> Operation: @@ -321,3 +359,86 @@ def get_test_resource(self, resource_id: str) -> TestResource: res = self._api.do("GET", f"/api/2.0/lro-testing/resources/{resource_id}", headers=headers) return TestResource.from_dict(res) + + +class CreateTestResourceOperation: + """Long-running operation for create_test_resource""" + + def __init__(self, impl: LroTestingAPI, operation: Operation): + self._impl = impl + self._operation = operation + + def wait(self, opts: Optional[lro.LroOptions] = None) -> TestResource: + """Wait blocks until the long-running operation is completed with default 20 min + timeout. If the operation didn't finish within the timeout, this function will + raise an error of type TimeoutError, otherwise returns successful response and + any errors encountered. 
+ + :param opts: :class:`LroOptions` + Timeout options (default: 20 minutes) + + :returns: :class:`TestResource` + """ + + def poll_operation(): + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + if not operation.done: + return None, RetryError.continues("operation still in progress") + + if operation.error: + error_msg = operation.error.message if operation.error.message else "unknown error" + if operation.error.error_code: + error_msg = f"[{operation.error.error_code}] {error_msg}" + return None, RetryError.halt(Exception(f"operation failed: {error_msg}")) + + # Operation completed successfully, unmarshal response. + if operation.response is None: + return None, RetryError.halt(Exception("operation completed but no response available")) + + test_resource = TestResource.from_dict(operation.response) + + return test_resource, None + + return poll(poll_operation, timeout=opts.timeout if opts is not None else timedelta(minutes=20)) + + def cancel(self): + """Starts asynchronous cancellation on a long-running operation. The server + makes a best effort to cancel the operation, but success is not guaranteed. + """ + self._impl.cancel_operation(name=self._operation.name) + + def name(self) -> str: + """Name returns the name of the long-running operation. The name is assigned + by the server and is unique within the service from which the operation is created. + + :returns: str + """ + return self._operation.name + + def metadata(self) -> TestResourceOperationMetadata: + """Metadata returns metadata associated with the long-running operation. + If the metadata is not available, the returned metadata is None. 
+ + :returns: :class:`TestResourceOperationMetadata` or None + """ + if self._operation.metadata is None: + return None + + return TestResourceOperationMetadata.from_dict(self._operation.metadata) + + def done(self) -> bool: + """Done reports whether the long-running operation has completed. + + :returns: bool + """ + # Refresh the operation state first + operation = self._impl.get_operation(name=self._operation.name) + + # Update local operation state + self._operation = operation + + return operation.done diff --git a/tests/generated/test_http_call.py b/tests/generated/test_http_call.py index e31805e29..eecb6cf9c 100755 --- a/tests/generated/test_http_call.py +++ b/tests/generated/test_http_call.py @@ -1,7 +1,5 @@ # Code generated by Databricks SDK Generator. DO NOT EDIT. -import json - import pytest from google.protobuf.duration_pb2 import Duration from google.protobuf.timestamp_pb2 import Timestamp @@ -55,7 +53,7 @@ def _fieldmask(d: str) -> FieldMask: lambda requests_mock: requests_mock.patch("http://localhost/api/2.0/http-call/update_string/789/true"), lambda client: client.update_resource( resource=Resource( - any_field=json.loads('{"key": "value"}'), + any_field={"key": "value"}, nested_path_param_bool=True, nested_path_param_int=789, nested_path_param_string="update_string",