
Commit 8a07538

Update SDK API to 44cbc832f1b070c47544ff470fd8498853d24cf3 (#1182)
This PR updates the SDK to the latest API changes.

NO_CHANGELOG=true

Co-authored-by: databricks-ci-ghec-1[bot] <184311507+databricks-ci-ghec-1[bot]@users.noreply.github.com>
1 parent 1ea4956 commit 8a07538

15 files changed (+62, -51 lines)

.codegen/_openapi_sha

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
- b7f0bdef8060a7daec78568f87fae44773f598c5
+ 44cbc832f1b070c47544ff470fd8498853d24cf3

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -39,3 +39,4 @@
 * Add `command` and `env_vars` fields for `databricks.sdk.service.apps.AppDeployment`.
 * Add `full_name` and `securable_type` fields for `databricks.sdk.service.catalog.AccessRequestDestinations`.
 * [Breaking] Change `delete_kafka_config()` method for [w.feature_engineering](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/feature_engineering.html) workspace-level service. Method path has changed.
+* [Breaking] Change long-running operation configuration for [PostgresAPI.delete_role](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/postgres/postgres.html#databricks.sdk.service.postgres.PostgresAPI.delete_role) method. Long-running operation response type changed to `None`.
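The breaking `PostgresAPI.delete_role` change above removes the usable return value from the long-running delete. A minimal migration sketch follows; the `w.postgres` accessor and the `name` argument are assumptions for illustration and are not taken from this diff.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Previously the call returned a response object that callers could inspect.
    # result = w.postgres.delete_role(name="example-role")  # old pattern (argument name assumed)

    # After this change the call returns None, so drop any use of the return value.
    w.postgres.delete_role(name="example-role")  # argument name assumed; returns None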

databricks/sdk/service/postgres.py

Lines changed: 4 additions & 6 deletions
Some generated files are not rendered by default.

docs/account/iam/workspace_assignment.rst

Lines changed: 4 additions & 4 deletions
@@ -43,9 +43,9 @@

  a = AccountClient()

- workspace_id = os.environ["TEST_WORKSPACE_ID"]
+ workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

- all = a.workspace_assignment.list(list=workspace_id)
+ all = a.workspace_assignment.list(workspace_id=workspace_id)

 Get the permission assignments for the specified Databricks account and Databricks workspace.

@@ -74,9 +74,9 @@

  spn_id = spn.id

- workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
+ workspace_id = os.environ["TEST_WORKSPACE_ID"]

- _ = a.workspace_assignment.update(
+ a.workspace_assignment.update(
      workspace_id=workspace_id,
      principal_id=spn_id,
      permissions=[iam.WorkspacePermission.USER],

docs/account/provisioning/credentials.rst

Lines changed: 3 additions & 3 deletions
@@ -24,15 +24,15 @@

  a = AccountClient()

- creds = a.credentials.create(
+ role = a.credentials.create(
      credentials_name=f"sdk-{time.time_ns()}",
      aws_credentials=provisioning.CreateCredentialAwsCredentials(
-         sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+         sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
      ),
  )

  # cleanup
- a.credentials.delete(credentials_id=creds.credentials_id)
+ a.credentials.delete(credentials_id=role.credentials_id)

 Creates a Databricks credential configuration that represents cloud cross-account credentials for a
 specified account. Databricks uses this to set up network infrastructure properly to host Databricks

docs/account/provisioning/storage.rst

Lines changed: 1 addition & 4 deletions
@@ -23,13 +23,10 @@

  a = AccountClient()

- bucket = a.storage.create(
+ storage = a.storage.create(
      storage_configuration_name=f"sdk-{time.time_ns()}",
      root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
  )
-
- # cleanup
- a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)

 Creates a Databricks storage configuration for an account.

docs/workspace/catalog/catalogs.rst

Lines changed: 2 additions & 2 deletions
@@ -24,10 +24,10 @@

  w = WorkspaceClient()

- created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+ new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

  # cleanup
- w.catalogs.delete(name=created.name, force=True)
+ w.catalogs.delete(name=new_catalog.name, force=True)

 Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
 **CREATE_CATALOG** privilege.

docs/workspace/catalog/external_locations.rst

Lines changed: 10 additions & 9 deletions
@@ -30,20 +30,22 @@

  w = WorkspaceClient()

- credential = w.storage_credentials.create(
+ storage_credential = w.storage_credentials.create(
      name=f"sdk-{time.time_ns()}",
-     aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     comment="created via SDK",
  )

- created = w.external_locations.create(
+ external_location = w.external_locations.create(
      name=f"sdk-{time.time_ns()}",
-     credential_name=credential.name,
-     url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+     credential_name=storage_credential.name,
+     comment="created via SDK",
+     url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
  )

  # cleanup
- w.storage_credentials.delete(delete=credential.name)
- w.external_locations.delete(delete=created.name)
+ w.storage_credentials.delete(name=storage_credential.name)
+ w.external_locations.delete(name=external_location.name)

 Creates a new external location entry in the metastore. The caller must be a metastore admin or have
 the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage

@@ -140,11 +142,10 @@
 .. code-block::

      from databricks.sdk import WorkspaceClient
-     from databricks.sdk.service import catalog

      w = WorkspaceClient()

-     all = w.external_locations.list(catalog.ListExternalLocationsRequest())
+     all = w.external_locations.list()

 Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
 must be a metastore admin, the owner of the external location, or a user that has some privilege on

docs/workspace/catalog/schemas.rst

Lines changed: 4 additions & 4 deletions
@@ -22,13 +22,13 @@

  w = WorkspaceClient()

- created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+ new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

- created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
+ created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)

  # cleanup
- w.catalogs.delete(name=created_catalog.name, force=True)
- w.schemas.delete(full_name=created_schema.full_name)
+ w.catalogs.delete(name=new_catalog.name, force=True)
+ w.schemas.delete(full_name=created.full_name)

 Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
 **CREATE_SCHEMA** privilege in the parent catalog.

docs/workspace/catalog/storage_credentials.rst

Lines changed: 7 additions & 6 deletions
@@ -30,13 +30,14 @@

  w = WorkspaceClient()

- created = w.storage_credentials.create(
+ storage_credential = w.storage_credentials.create(
      name=f"sdk-{time.time_ns()}",
-     aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     comment="created via SDK",
  )

  # cleanup
- w.storage_credentials.delete(delete=created.name)
+ w.storage_credentials.delete(name=storage_credential.name)

 Creates a new storage credential.

@@ -172,17 +173,17 @@

  created = w.storage_credentials.create(
      name=f"sdk-{time.time_ns()}",
-     aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
  )

  _ = w.storage_credentials.update(
      name=created.name,
      comment=f"sdk-{time.time_ns()}",
-     aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
  )

  # cleanup
- w.storage_credentials.delete(delete=created.name)
+ w.storage_credentials.delete(name=created.name)

 Updates a storage credential on the metastore.
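Several of the documentation diffs above make the same two adjustments: the request-side dataclass is now `catalog.AwsIamRoleRequest` (instead of `catalog.AwsIamRole`), and the delete examples pass `name=` rather than `delete=`. A minimal sketch combining both, reusing the environment variable from the examples above:

    import os
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Create a storage credential using the request-side dataclass shown in the diffs above.
    cred = w.storage_credentials.create(
        name=f"sdk-{time.time_ns()}",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
    )

    # cleanup: pass the credential name via the `name` keyword.
    w.storage_credentials.delete(name=cred.name)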
