2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
-8f5eedbc991c4f04ce1284406577b0c92d59a224
+29cf17bb7d0af876271bed84b2a8c94670d54541
2 changes: 1 addition & 1 deletion databricks/sdk/service/catalog.py

Some generated files are not rendered by default.

18 changes: 6 additions & 12 deletions databricks/sdk/service/compute.py

Some generated files are not rendered by default.

7 changes: 5 additions & 2 deletions databricks/sdk/service/sql.py

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@

a = AccountClient()

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

-all = a.workspace_assignment.list(list=workspace_id)
+all = a.workspace_assignment.list(workspace_id=workspace_id)

Get the permission assignments for the specified Databricks account and Databricks workspace.
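
The updated call returns an iterator of assignments. A minimal sketch of consuming it, assuming ``PermissionAssignment`` exposes ``principal`` and ``permissions`` attributes as in the ``iam`` dataclasses:

    import os

    from databricks.sdk import AccountClient

    a = AccountClient()

    workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

    # Print each principal and its workspace permissions.
    for assignment in a.workspace_assignment.list(workspace_id=workspace_id):
        print(assignment.principal, assignment.permissions)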

@@ -74,9 +74,9 @@

spn_id = spn.id

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

-a.workspace_assignment.update(
+_ = a.workspace_assignment.update(
workspace_id=workspace_id,
principal_id=spn_id,
permissions=[iam.WorkspacePermission.USER],
6 changes: 5 additions & 1 deletion docs/account/provisioning/storage.rst
@@ -16,6 +16,7 @@

.. code-block::

+import os
import time

from databricks.sdk import AccountClient
@@ -25,8 +26,11 @@

storage = a.storage.create(
storage_configuration_name=f"sdk-{time.time_ns()}",
-root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
)

+# cleanup
+a.storage.delete(storage_configuration_id=storage.storage_configuration_id)

Creates a Databricks storage configuration for an account.
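
A minimal companion sketch for reading configurations back, assuming the account-level ``storage`` API also exposes ``list`` alongside ``create`` and ``delete``:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Enumerate existing storage configurations in the account.
    for cfg in a.storage.list():
        print(cfg.storage_configuration_name, cfg.storage_configuration_id)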

2 changes: 1 addition & 1 deletion docs/dbdataclasses/catalog.rst
@@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo

.. py:class:: SecurableKind
-Latest kind: SECRET_EXTERNAL_AWS_SECRETS_MANAGER = 273; Next id:274
+Latest kind: CREDENTIAL_STORAGE_UC_CONNECTION = 275; Next id:276

.. py:attribute:: TABLE_DB_STORAGE
:value: "TABLE_DB_STORAGE"
21 changes: 10 additions & 11 deletions docs/workspace/catalog/external_locations.rst
@@ -32,18 +32,18 @@

credential = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
-aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

created = w.external_locations.create(
name=f"sdk-{time.time_ns()}",
credential_name=credential.name,
-url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
)

# cleanup
-w.storage_credentials.delete(name=credential.name)
-w.external_locations.delete(name=created.name)
+w.storage_credentials.delete(delete=credential.name)
+w.external_locations.delete(delete=created.name)

Creates a new external location entry in the metastore. The caller must be a metastore admin or have
the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
@@ -140,11 +140,10 @@
.. code-block::

from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog

w = WorkspaceClient()

-all = w.external_locations.list(catalog.ListExternalLocationsRequest())
+all = w.external_locations.list()

Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
must be a metastore admin, the owner of the external location, or a user that has some privilege on
@@ -191,24 +190,24 @@

credential = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
-aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

created = w.external_locations.create(
name=f"sdk-{time.time_ns()}",
credential_name=credential.name,
-url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
)

_ = w.external_locations.update(
name=created.name,
credential_name=credential.name,
-url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
)

# cleanup
-w.storage_credentials.delete(delete=credential.name)
-w.external_locations.delete(delete=created.name)
+w.storage_credentials.delete(name=credential.name)
+w.external_locations.delete(name=created.name)

Updates an external location in the metastore. The caller must be the owner of the external location,
or be a metastore admin. In the second case, the admin can only update the name of the external
13 changes: 6 additions & 7 deletions docs/workspace/catalog/storage_credentials.rst
@@ -30,13 +30,13 @@

w = WorkspaceClient()

-created = w.storage_credentials.create(
+credential = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

# cleanup
-w.storage_credentials.delete(delete=created.name)
+w.storage_credentials.delete(delete=credential.name)

Creates a new storage credential.

@@ -98,13 +98,13 @@

created = w.storage_credentials.create(
name=f"sdk-{time.time_ns()}",
-aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

-by_name = w.storage_credentials.get(get=created.name)
+by_name = w.storage_credentials.get(name=created.name)

# cleanup
-w.storage_credentials.delete(delete=created.name)
+w.storage_credentials.delete(name=created.name)

Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
storage credential, or have some permission on the storage credential.
@@ -123,11 +123,10 @@
.. code-block::

from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog

w = WorkspaceClient()

-all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest())
+all = w.storage_credentials.list()

Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
only those storage credentials the caller has permission to access. If the caller is a metastore
9 changes: 4 additions & 5 deletions docs/workspace/compute/clusters.rst
@@ -167,8 +167,7 @@
node_type_id take precedence.
:param enable_elastic_disk: bool (optional)
Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-to function correctly - refer to the User Guide for more details.
+when its Spark workers are running low on disk space.
:param enable_local_disk_encryption: bool (optional)
Whether to enable LUKS on cluster VMs' local disks
:param gcp_attributes: :class:`GcpAttributes` (optional)
@@ -402,8 +401,7 @@
node_type_id take precedence.
:param enable_elastic_disk: bool (optional)
Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-to function correctly - refer to the User Guide for more details.
+when its Spark workers are running low on disk space.
:param enable_local_disk_encryption: bool (optional)
Whether to enable LUKS on cluster VMs' local disks
:param gcp_attributes: :class:`GcpAttributes` (optional)
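
To ground the ``enable_elastic_disk`` and ``enable_local_disk_encryption`` flags described above, a minimal create sketch that sets them (the runtime and node-type helpers are placeholder choices; ``create`` returns a waiter, so ``.result()`` blocks until the cluster is running):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Pick a current runtime and a local-disk node type for the demo cluster.
    cluster = w.clusters.create(
        cluster_name="sdk-elastic-disk-demo",
        spark_version=w.clusters.select_spark_version(latest=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        num_workers=1,
        autotermination_minutes=15,
        enable_elastic_disk=True,
        enable_local_disk_encryption=True,
    ).result()
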
@@ -647,10 +645,11 @@
.. code-block::

from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import compute

w = WorkspaceClient()

-nodes = w.clusters.list_node_types()
+all = w.clusters.list(compute.ListClustersRequest())

Return information about all pinned and active clusters, and all clusters terminated within the last
30 days. Clusters terminated prior to this period are not included.
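
Since ``list`` returns an iterator of cluster details, a minimal consumption sketch (assuming the call also works without an explicit request object and that ``ClusterDetails`` carries ``cluster_name`` and ``state``):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Walk pinned, active, and recently terminated clusters.
    for c in w.clusters.list():
        print(c.cluster_name, c.state)
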
2 changes: 1 addition & 1 deletion docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@

obj = w.workspace.get_status(path=notebook_path)

-_ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))

Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
object.
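
For the ``get`` call that this section documents, a minimal sketch mirroring the removed line above (it assumes a notebook already exists at ``notebook_path``, as in the surrounding example):

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

    # Assumes the notebook at notebook_path was created earlier in the example.
    obj = w.workspace.get_status(path=notebook_path)

    # Fetch the object's full access control list, not just the permission levels.
    acl = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
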
9 changes: 4 additions & 5 deletions docs/workspace/ml/model_registry.rst
@@ -736,14 +736,13 @@

w = WorkspaceClient()

-model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

-created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+model = w.model_registry.get_model(name=created.registered_model.name)

-w.model_registry.update_model_version(
+w.model_registry.update_model(
+name=model.registered_model_databricks.name,
description=f"sdk-{time.time_ns()}",
-name=created.model_version.name,
-version=created.model_version.version,
)

Updates a registered model.
4 changes: 2 additions & 2 deletions docs/workspace/sql/dbsql_permissions.rst
@@ -1,5 +1,5 @@
-``w.dbsql_permissions``: ACL / Permissions
-==========================================
+``w.dbsql_permissions``: Permissions (legacy)
+=============================================
.. currentmodule:: databricks.sdk.service.sql

.. py:class:: DbsqlPermissionsAPI
2 changes: 1 addition & 1 deletion docs/workspace/sql/queries.rst
@@ -29,7 +29,7 @@
display_name=f"sdk-{time.time_ns()}",
warehouse_id=srcs[0].warehouse_id,
description="test query from Go SDK",
-query_text="SHOW TABLES",
+query_text="SELECT 1",
)
)

31 changes: 20 additions & 11 deletions docs/workspace/workspace/workspace.rst
@@ -79,7 +79,7 @@

notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

-export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook)
+export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)

Exports an object or the contents of an entire directory.
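
The exported body is returned base64-encoded; a minimal decoding sketch continuing the example above (it assumes ``ExportResponse.content`` holds the base64 payload):

    import base64

    # Decode the exported notebook source returned by w.workspace.export.
    source = base64.b64decode(export_response.content).decode()
    print(source)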

@@ -148,9 +148,9 @@

w = WorkspaceClient()

-notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

-obj = w.workspace.get_status(path=notebook_path)
+get_status_response = w.workspace.get_status(path=notebook)

Gets the status of an object or a directory. If `path` does not exist, this call returns an error
`RESOURCE_DOES_NOT_EXIST`.
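
A minimal sketch of handling the missing-path case, assuming the SDK surfaces ``RESOURCE_DOES_NOT_EXIST`` as ``databricks.sdk.errors.NotFound``:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import NotFound

    w = WorkspaceClient()

    try:
        status = w.workspace.get_status(path="/Users/does.not.exist@example.com/missing-notebook")
    except NotFound:
        # The path is absent from the workspace.
        status = None
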
@@ -179,11 +179,18 @@
notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

w.workspace.import_(
-content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
-format=workspace.ImportFormat.SOURCE,
-language=workspace.Language.SQL,
-overwrite=true_,
path=notebook_path,
+overwrite=True,
+format=workspace.ImportFormat.SOURCE,
+language=workspace.Language.PYTHON,
+content=base64.b64encode(
+(
+"""import time
+time.sleep(10)
+dbutils.notebook.exit('hello')
+"""
+).encode()
+).decode(),
)

Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
@@ -227,14 +234,16 @@

.. code-block::

+import os
+import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

-names = []
-for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
-names.append(i.path)
-assert len(names) > 0
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

+objects = w.workspace.list(path=os.path.dirname(notebook))

List workspace objects
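
The removed snippet above listed recursively; a minimal sketch keeping that pattern alongside the new non-recursive call:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Recursively collect every object path under the current user's home folder.
    names = [obj.path for obj in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True)]
    print(len(names))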
