2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
-f21f4933da405cac4bc77c9732044dc45b4f0c5a
+9b38571bfe7bf0bc595480f28eb93a8db3116985
2 changes: 1 addition & 1 deletion NEXT_CHANGELOG.md
@@ -15,4 +15,4 @@
 ### API Changes
 * Add `table_deltasharing_open_dir_based` enum value for `databricks.sdk.service.catalog.SecurableKind`.
 * Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
-* [Breaking] Remove `access_modes` and `storage_location` fields for `databricks.sdk.service.sharing.Table`.
+* [Breaking] Remove `access_modes` and `storage_location` fields for `databricks.sdk.service.sharing.Table`.
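
For consumers of the sharing API, a hypothetical sketch of code broken by the removal above (assuming `name` remains a field on `sharing.Table`; not part of this PR):

    from databricks.sdk.service import sharing

    def describe_table(table: sharing.Table) -> str:
        # After this release, `table.access_modes` and `table.storage_location`
        # no longer exist on the dataclass; reading either raises AttributeError.
        # Fields such as `name` (assumed here) are unaffected.
        return f"shared table: {table.name}"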
2 changes: 1 addition & 1 deletion databricks/sdk/service/catalog.py

Some generated files are not rendered by default.

6 changes: 6 additions & 0 deletions databricks/sdk/service/jobs.py

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion databricks/sdk/service/settings.py

Some generated files are not rendered by default.

7 changes: 4 additions & 3 deletions databricks/sdk/service/sharing.py

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@

a = AccountClient()

-workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
+workspace_id = os.environ["TEST_WORKSPACE_ID"]

-all = a.workspace_assignment.list(workspace_id=workspace_id)
+all = a.workspace_assignment.list(list=workspace_id)

Get the permission assignments for the specified Databricks account and Databricks workspace.

@@ -74,9 +74,9 @@

spn_id = spn.id

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

-a.workspace_assignment.update(
+_ = a.workspace_assignment.update(
    workspace_id=workspace_id,
    principal_id=spn_id,
    permissions=[iam.WorkspacePermission.USER],
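As a side note on the regenerated listing example, a hedged consumption sketch (principal and permission field names assumed from the `iam.PermissionAssignment` dataclass; the workspace id is a placeholder):

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Each assignment pairs a principal with its workspace permissions.
    for assignment in a.workspace_assignment.list(workspace_id=1234567890):  # placeholder id
        print(assignment.principal.display_name, assignment.permissions)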
6 changes: 3 additions & 3 deletions docs/account/provisioning/credentials.rst
@@ -24,15 +24,15 @@

a = AccountClient()

-creds = a.credentials.create(
+role = a.credentials.create(
    credentials_name=f"sdk-{time.time_ns()}",
    aws_credentials=provisioning.CreateCredentialAwsCredentials(
-        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
    ),
)

# cleanup
-a.credentials.delete(credentials_id=creds.credentials_id)
+a.credentials.delete(credentials_id=role.credentials_id)

Creates a Databricks credential configuration that represents cloud cross-account credentials for a
specified account. Databricks uses this to set up network infrastructure properly to host Databricks
5 changes: 1 addition & 4 deletions docs/account/provisioning/storage.rst
@@ -23,13 +23,10 @@

a = AccountClient()

-bucket = a.storage.create(
+storage = a.storage.create(
    storage_configuration_name=f"sdk-{time.time_ns()}",
    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
)
-
-# cleanup
-a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)

Creates a Databricks storage configuration for an account.

2 changes: 1 addition & 1 deletion docs/dbdataclasses/catalog.rst
@@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo

.. py:class:: SecurableKind

-   Latest kind: TABLE_DELTASHARING_OPEN_DIR_BASED = 290; Next id:291
+   Latest kind: CONNECTION_WORKDAY_HCM_USERNAME_PASSWORD = 292; Next id: 293

.. py:attribute:: TABLE_DB_STORAGE
   :value: "TABLE_DB_STORAGE"
4 changes: 2 additions & 2 deletions docs/workspace/catalog/catalogs.rst
@@ -24,10 +24,10 @@

w = WorkspaceClient()

-created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")

# cleanup
-w.catalogs.delete(name=created_catalog.name, force=True)
+w.catalogs.delete(name=created.name, force=True)

Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
**CREATE_CATALOG** privilege.
12 changes: 6 additions & 6 deletions docs/workspace/catalog/storage_credentials.rst
@@ -30,13 +30,13 @@

w = WorkspaceClient()

-credential = w.storage_credentials.create(
+created = w.storage_credentials.create(
    name=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

# cleanup
-w.storage_credentials.delete(name=credential.name)
+w.storage_credentials.delete(delete=created.name)

Creates a new storage credential.

@@ -172,17 +172,17 @@

created = w.storage_credentials.create(
    name=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

_ = w.storage_credentials.update(
    name=created.name,
    comment=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

# cleanup
-w.storage_credentials.delete(name=created.name)
+w.storage_credentials.delete(delete=created.name)

Updates a storage credential on the metastore.

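Both hunks in this file swap `catalog.AwsIamRoleRequest` for `catalog.AwsIamRole`. A minimal standalone sketch of the new spelling, with a placeholder ARN instead of the test environment variable:

    from databricks.sdk.service import catalog

    # Placeholder ARN; the regenerated examples read it from
    # TEST_METASTORE_DATA_ACCESS_ARN instead.
    role = catalog.AwsIamRole(role_arn="arn:aws:iam::123456789012:role/example")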
2 changes: 1 addition & 1 deletion docs/workspace/catalog/tables.rst
@@ -156,7 +156,7 @@

created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

-summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)
+all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)

# cleanup
w.schemas.delete(full_name=created_schema.full_name)
3 changes: 2 additions & 1 deletion docs/workspace/compute/clusters.rst
@@ -645,10 +645,11 @@
.. code-block::

from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import compute

w = WorkspaceClient()

-nodes = w.clusters.list_node_types()
+all = w.clusters.list(compute.ListClustersRequest())

Return information about all pinned and active clusters, and all clusters terminated within the last
30 days. Clusters terminated prior to this period are not included.
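A short follow-on sketch, assuming `clusters.list` returns a paginated iterator of `compute.ClusterDetails` as in current SDK versions:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # Iterate pinned, active, and recently terminated clusters lazily.
    for cluster in w.clusters.list(compute.ListClustersRequest()):
        print(cluster.cluster_id, cluster.state)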
2 changes: 1 addition & 1 deletion docs/workspace/iam/current_user.rst
@@ -17,7 +17,7 @@

w = WorkspaceClient()

-me = w.current_user.me()
+me2 = w.current_user.me()

Get details about the current method caller's identity.

2 changes: 1 addition & 1 deletion docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@

obj = w.workspace.get_status(path=notebook_path)

-levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+_ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))

Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
object.
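A hedged sketch of reading the `iam.ObjectPermissions` value returned by the updated `permissions.get` example (the object id is a placeholder):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    perms = w.permissions.get(request_object_type="notebooks", request_object_id="12345")  # placeholder id
    for entry in perms.access_control_list or []:
        # Each entry names a principal and its direct or inherited permission levels.
        principal = entry.user_name or entry.group_name or entry.service_principal_name
        print(principal, [p.permission_level for p in (entry.all_permissions or [])])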
6 changes: 6 additions & 0 deletions docs/workspace/jobs/jobs.rst
@@ -1075,6 +1075,12 @@
Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
run state after the job is submitted.

+**Important:** Jobs submitted using this endpoint are not saved as a job. They do not show up in the
+Jobs UI, and do not retry when they fail. Because they are not saved, Databricks cannot auto-optimize
+serverless compute in case of failure. If your job fails, you may want to use classic compute to
+specify the compute needs for the job. Alternatively, use the `POST /jobs/create` and `POST
+/jobs/run-now` endpoints to create and run a saved job.
+
:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
    List of permissions to set on the job.
:param budget_policy_id: str (optional)
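The paragraph added above points to `jobs/create` plus `jobs/run-now` as the saved-job alternative; a minimal SDK sketch of that flow (job name, task key, and notebook path are placeholders):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # A saved job shows up in the Jobs UI and can be configured to retry.
    job = w.jobs.create(
        name="example-saved-job",  # placeholder
        tasks=[
            jobs.Task(
                task_key="main",  # placeholder
                notebook_task=jobs.NotebookTask(notebook_path="/Workspace/Users/someone@example.com/nb"),  # placeholder
            )
        ],
    )
    # Trigger a run of the saved job and block until it reaches a terminal state.
    run = w.jobs.run_now(job_id=job.job_id).result()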
4 changes: 3 additions & 1 deletion docs/workspace/ml/model_registry.rst
@@ -91,6 +91,8 @@
w = WorkspaceClient()

model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

+mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

Creates a new registered model with the name specified in the request body. Throws
`RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
@@ -120,7 +122,7 @@

model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

-created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

Creates a model version.

24 changes: 6 additions & 18 deletions docs/workspace/sharing/providers.rst
@@ -101,29 +101,17 @@

.. code-block::

-import time

from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import sharing

w = WorkspaceClient()

-public_share_recipient = """{
-    "shareCredentialsVersion":1,
-    "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz",
-    "endpoint":"https://sharing.delta.io/delta-sharing/"
-}
-"""
-
-created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
-
-shares = w.providers.list_shares(name=created.name)
-
-# cleanup
-w.providers.delete(name=created.name)
+all = w.providers.list(sharing.ListProvidersRequest())

-Gets an array of available authentication providers. The caller must either be a metastore admin or
-the owner of the providers. Providers not owned by the caller are not included in the response. There
-is no guarantee of a specific ordering of the elements in the array.
+Gets an array of available authentication providers. The caller must either be a metastore admin, have
+the **USE_PROVIDER** privilege on the providers, or be the owner of the providers. Providers not owned
+by the caller and for which the caller does not have the **USE_PROVIDER** privilege are not included
+in the response. There is no guarantee of a specific ordering of the elements in the array.

:param data_provider_global_metastore_id: str (optional)
    If not provided, all providers will be returned. If no providers exist with this ID, no results will
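To round out the regenerated listing example, a small consumption sketch (assuming `name` and `owner` fields on `sharing.ProviderInfo`):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    # Only providers the caller owns or can use (USE_PROVIDER) are returned.
    for provider in w.providers.list(sharing.ListProvidersRequest()):
        print(provider.name, provider.owner)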
4 changes: 2 additions & 2 deletions docs/workspace/workspace/workspace.rst
@@ -79,7 +79,7 @@

notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

-export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook)
+export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)

Exports an object or the contents of an entire directory.

@@ -180,7 +180,7 @@

w.workspace.import_(
    path=notebook_path,
-    overwrite=true_,
+    overwrite=True,
    format=workspace.ImportFormat.SOURCE,
    language=workspace.Language.PYTHON,
    content=base64.b64encode(