
Commit c9d443f

Author: phoenix
Update SDK to 7b3f29610da67db921050c9648be58891f429a70
1 parent ad941ec commit c9d443f

File tree

14 files changed: +52 −58 lines

.codegen/_openapi_sha

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-8f5eedbc991c4f04ce1284406577b0c92d59a224
+7b3f29610da67db921050c9648be58891f429a70

databricks/sdk/service/catalog.py

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default.

databricks/sdk/service/compute.py

Lines changed: 6 additions & 12 deletions
Some generated files are not rendered by default.

docs/account/provisioning/credentials.rst

Lines changed: 3 additions & 3 deletions
@@ -24,15 +24,15 @@

     a = AccountClient()

-    role = a.credentials.create(
+    creds = a.credentials.create(
         credentials_name=f"sdk-{time.time_ns()}",
         aws_credentials=provisioning.CreateCredentialAwsCredentials(
-            sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+            sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
         ),
     )

     # cleanup
-    a.credentials.delete(credentials_id=role.credentials_id)
+    a.credentials.delete(credentials_id=creds.credentials_id)

 Creates a Databricks credential configuration that represents cloud cross-account credentials for a
 specified account. Databricks uses this to set up network infrastructure properly to host Databricks
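
As a hedged note beyond the diff: the create/delete calls above have read counterparts on the same API. A minimal sketch of enumerating credential configurations, with field names per the SDK's provisioning.Credential dataclass:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # list all credential configurations in the account
    for c in a.credentials.list():
        print(c.credentials_id, c.credentials_name)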

docs/account/provisioning/storage.rst

Lines changed: 4 additions & 1 deletion
@@ -23,10 +23,13 @@

     a = AccountClient()

-    storage = a.storage.create(
+    bucket = a.storage.create(
         storage_configuration_name=f"sdk-{time.time_ns()}",
         root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
     )
+
+    # cleanup
+    a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)

 Creates a Databricks storage configuration for an account.
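
A similarly hedged sketch of the read side of this API; a.storage.list sits alongside the create/delete calls the hunk shows:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # enumerate storage configurations and their root buckets
    for sc in a.storage.list():
        print(sc.storage_configuration_id, sc.root_bucket_info.bucket_name)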

docs/dbdataclasses/catalog.rst

Lines changed: 1 addition & 1 deletion
@@ -1497,7 +1497,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo

 .. py:class:: SecurableKind

-    Latest kind: SECRET_EXTERNAL_AWS_SECRETS_MANAGER = 273; Next id:274
+    Latest kind: CREDENTIAL_STORAGE_UC_CONNECTION = 275; Next id:276

 .. py:attribute:: TABLE_DB_STORAGE
     :value: "TABLE_DB_STORAGE"
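
For orientation, a one-line hedged sketch of addressing this enum from user code; the member shown is the one documented in the context lines above:

    from databricks.sdk.service import catalog

    print(catalog.SecurableKind.TABLE_DB_STORAGE.value)  # "TABLE_DB_STORAGE"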

docs/workspace/catalog/external_locations.rst

Lines changed: 13 additions & 11 deletions
@@ -30,20 +30,22 @@

     w = WorkspaceClient()

-    credential = w.storage_credentials.create(
+    storage_credential = w.storage_credentials.create(
         name=f"sdk-{time.time_ns()}",
         aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        comment="created via SDK",
     )

-    created = w.external_locations.create(
+    external_location = w.external_locations.create(
         name=f"sdk-{time.time_ns()}",
-        credential_name=credential.name,
-        url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+        credential_name=storage_credential.name,
+        comment="created via SDK",
+        url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
     )

     # cleanup
-    w.storage_credentials.delete(name=credential.name)
-    w.external_locations.delete(name=created.name)
+    w.storage_credentials.delete(name=storage_credential.name)
+    w.external_locations.delete(name=external_location.name)

 Creates a new external location entry in the metastore. The caller must be a metastore admin or have
 the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
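
A hedged sketch of fetching a location back after a create like the one above; "my-location" is a placeholder name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # look up one external location by name
    loc = w.external_locations.get(name="my-location")
    print(loc.url, loc.credential_name)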
@@ -191,24 +193,24 @@

     credential = w.storage_credentials.create(
         name=f"sdk-{time.time_ns()}",
-        aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
     )

     created = w.external_locations.create(
         name=f"sdk-{time.time_ns()}",
         credential_name=credential.name,
-        url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+        url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
     )

     _ = w.external_locations.update(
         name=created.name,
         credential_name=credential.name,
-        url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+        url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
     )

     # cleanup
-    w.storage_credentials.delete(delete=credential.name)
-    w.external_locations.delete(delete=created.name)
+    w.storage_credentials.delete(name=credential.name)
+    w.external_locations.delete(name=created.name)

 Updates an external location in the metastore. The caller must be the owner of the external location,
 or be a metastore admin. In the second case, the admin can only update the name of the external
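
And a hedged sketch of the list side, pairing with the update example; field names follow the SDK's ExternalLocationInfo dataclass:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # enumerate all external locations visible to the caller
    for el in w.external_locations.list():
        print(el.name, el.url, el.credential_name)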

docs/workspace/catalog/storage_credentials.rst

Lines changed: 3 additions & 3 deletions
@@ -173,17 +173,17 @@

     created = w.storage_credentials.create(
         name=f"sdk-{time.time_ns()}",
-        aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
     )

     _ = w.storage_credentials.update(
         name=created.name,
         comment=f"sdk-{time.time_ns()}",
-        aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
     )

     # cleanup
-    w.storage_credentials.delete(delete=created.name)
+    w.storage_credentials.delete(name=created.name)

 Updates a storage credential on the metastore.
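
A hedged sketch of reading a credential back by name after an update like the one above; "my-credential" is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # fetch one storage credential and inspect its IAM role, if any
    cred = w.storage_credentials.get(name="my-credential")
    if cred.aws_iam_role:
        print(cred.name, cred.aws_iam_role.role_arn)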

docs/workspace/compute/clusters.rst

Lines changed: 4 additions & 5 deletions
@@ -167,8 +167,7 @@
         node_type_id take precedence.
     :param enable_elastic_disk: bool (optional)
         Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-        when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-        to function correctly - refer to the User Guide for more details.
+        when its Spark workers are running low on disk space.
     :param enable_local_disk_encryption: bool (optional)
         Whether to enable LUKS on cluster VMs' local disks
     :param gcp_attributes: :class:`GcpAttributes` (optional)

@@ -402,8 +401,7 @@
         node_type_id take precedence.
     :param enable_elastic_disk: bool (optional)
         Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-        when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-        to function correctly - refer to the User Guide for more details.
+        when its Spark workers are running low on disk space.
     :param enable_local_disk_encryption: bool (optional)
         Whether to enable LUKS on cluster VMs' local disks
     :param gcp_attributes: :class:`GcpAttributes` (optional)
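
The trimmed parameter is still accepted by clusters.create; a hedged sketch of setting it, where the spark version and node type come from the SDK's selection helpers and the cluster name is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # create() returns a waiter; .result() blocks until the cluster is RUNNING
    cluster = w.clusters.create(
        cluster_name="sdk-elastic-disk-demo",
        spark_version=w.clusters.select_spark_version(latest=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        autotermination_minutes=15,
        num_workers=1,
        enable_elastic_disk=True,  # Autoscaling Local Storage
    ).result()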
@@ -647,10 +645,11 @@
     .. code-block::

         from databricks.sdk import WorkspaceClient
+        from databricks.sdk.service import compute

         w = WorkspaceClient()

-        nodes = w.clusters.list_node_types()
+        all = w.clusters.list(compute.ListClustersRequest())

 Return information about all pinned and active clusters, and all clusters terminated within the last
 30 days. Clusters terminated prior to this period are not included.
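
A hedged sketch of consuming the iterator the corrected example returns; field names follow compute.ClusterDetails:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # list() yields ClusterDetails lazily, page by page
    for c in w.clusters.list(compute.ListClustersRequest()):
        print(c.cluster_id, c.cluster_name, c.state)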

docs/workspace/iam/permissions.rst

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@

     obj = w.workspace.get_status(path=notebook_path)

-    _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+    levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))

 Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
 object.
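
A hedged sketch of what the substituted call yields; the response's permission_levels list describes the levels available for that object type, and the object id below is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    levels = w.permissions.get_permission_levels(
        request_object_type="notebooks",
        request_object_id="12345",
    )
    for pl in levels.permission_levels:
        print(pl.permission_level, pl.description)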
