
Commit b5aed30

Generated API changes (#1098)
## What changes are proposed in this pull request?

API changes generated by genkit

## How is this tested?

N/A

---------

Signed-off-by: jh-db <[email protected]>
1 parent d153bd3 commit b5aed30

File tree

19 files changed (+92, -70 lines changed)


.codegen/_openapi_sha

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-e2018bb00cba203508f8afe5a6d41bd49789ba25
+59c4c0f3d5f0ef00cd5350b5674e941a7606d91a

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -14,3 +14,4 @@
 ### Internal Changes
 
 ### API Changes
+* Add `google_ads`, `tiktok_ads`, `salesforce_marketing_cloud`, `hubspot`, `workday_hcm`, `guidewire` and `zendesk` enum values for `databricks.sdk.service.pipelines.IngestionSourceType`.
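
A minimal usage sketch (not part of the diff), assuming an SDK build that includes this commit; the new connectors are addressed like any existing `IngestionSourceType` member:

    # Hypothetical usage sketch: the new connectors are ordinary enum members.
    from databricks.sdk.service.pipelines import IngestionSourceType

    source = IngestionSourceType.SALESFORCE_MARKETING_CLOUD
    print(source.value)  # "SALESFORCE_MARKETING_CLOUD"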

databricks/sdk/service/jobs.py

Lines changed: 5 additions & 2 deletions
Some generated files are not rendered by default.

databricks/sdk/service/pipelines.py

Lines changed: 7 additions & 0 deletions
Some generated files are not rendered by default.
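
The seven added lines are not rendered here, but the changelog entry and the docs diff below list exactly seven new enum values, so the additions are presumably the matching members of `IngestionSourceType`. A reconstructed sketch, not copied verbatim from the commit:

    # Presumed additions to IngestionSourceType in pipelines.py (reconstructed,
    # not verbatim from the diff):
    GOOGLE_ADS = "GOOGLE_ADS"
    GUIDEWIRE = "GUIDEWIRE"
    HUBSPOT = "HUBSPOT"
    SALESFORCE_MARKETING_CLOUD = "SALESFORCE_MARKETING_CLOUD"
    TIKTOK_ADS = "TIKTOK_ADS"
    WORKDAY_HCM = "WORKDAY_HCM"
    ZENDESK = "ZENDESK"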

docs/account/iam/workspace_assignment.rst

Lines changed: 4 additions & 4 deletions
@@ -43,9 +43,9 @@
 
     a = AccountClient()
 
-    workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
+    workspace_id = os.environ["TEST_WORKSPACE_ID"]
 
-    all = a.workspace_assignment.list(workspace_id=workspace_id)
+    all = a.workspace_assignment.list(list=workspace_id)
 
 
 Get the permission assignments for the specified Databricks account and Databricks workspace.

@@ -74,9 +74,9 @@
 
     spn_id = spn.id
 
-    workspace_id = os.environ["TEST_WORKSPACE_ID"]
+    workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-    a.workspace_assignment.update(
+    _ = a.workspace_assignment.update(
         workspace_id=workspace_id,
         principal_id=spn_id,
         permissions=[iam.WorkspacePermission.USER],

docs/account/provisioning/credentials.rst

Lines changed: 3 additions & 3 deletions
@@ -24,15 +24,15 @@
 
     a = AccountClient()
 
-    creds = a.credentials.create(
+    role = a.credentials.create(
         credentials_name=f"sdk-{time.time_ns()}",
         aws_credentials=provisioning.CreateCredentialAwsCredentials(
-            sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+            sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
         ),
     )
 
     # cleanup
-    a.credentials.delete(credentials_id=creds.credentials_id)
+    a.credentials.delete(credentials_id=role.credentials_id)
 
 Creates a Databricks credential configuration that represents cloud cross-account credentials for a
 specified account. Databricks uses this to set up network infrastructure properly to host Databricks

docs/account/provisioning/storage.rst

Lines changed: 4 additions & 3 deletions
@@ -16,20 +16,21 @@
 
 .. code-block::
 
+    import os
     import time
 
     from databricks.sdk import AccountClient
     from databricks.sdk.service import provisioning
 
     a = AccountClient()
 
-    bucket = a.storage.create(
+    storage = a.storage.create(
         storage_configuration_name=f"sdk-{time.time_ns()}",
-        root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+        root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
     )
 
     # cleanup
-    a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
+    a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 
 Creates a Databricks storage configuration for an account.
 

docs/dbdataclasses/pipelines.rst

Lines changed: 21 additions & 0 deletions
@@ -151,6 +151,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: GA4_RAW_DATA
       :value: "GA4_RAW_DATA"
 
+   .. py:attribute:: GOOGLE_ADS
+      :value: "GOOGLE_ADS"
+
+   .. py:attribute:: GUIDEWIRE
+      :value: "GUIDEWIRE"
+
+   .. py:attribute:: HUBSPOT
+      :value: "HUBSPOT"
+
    .. py:attribute:: MANAGED_POSTGRESQL
       :value: "MANAGED_POSTGRESQL"
 

@@ -175,6 +184,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SALESFORCE
       :value: "SALESFORCE"
 
+   .. py:attribute:: SALESFORCE_MARKETING_CLOUD
+      :value: "SALESFORCE_MARKETING_CLOUD"
+
    .. py:attribute:: SERVICENOW
       :value: "SERVICENOW"
 

@@ -190,9 +202,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: TERADATA
       :value: "TERADATA"
 
+   .. py:attribute:: TIKTOK_ADS
+      :value: "TIKTOK_ADS"
+
+   .. py:attribute:: WORKDAY_HCM
+      :value: "WORKDAY_HCM"
+
    .. py:attribute:: WORKDAY_RAAS
       :value: "WORKDAY_RAAS"
 
+   .. py:attribute:: ZENDESK
+      :value: "ZENDESK"
+
 .. autoclass:: ListPipelineEventsResponse
    :members:
    :undoc-members:
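
Because these members exist only from this commit onward, code that also needs to run against older SDK releases may want to probe for them before use. A minimal, hypothetical compatibility check (not part of the diff):

    # Hypothetical sketch: fall back gracefully when the installed SDK predates
    # these enum members.
    from databricks.sdk.service import pipelines

    wanted = "ZENDESK"
    if hasattr(pipelines.IngestionSourceType, wanted):
        source = pipelines.IngestionSourceType[wanted]
        print(f"{source.value} is available in this SDK version")
    else:
        print(f"{wanted} requires a newer databricks-sdk release")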

docs/workspace/catalog/catalogs.rst

Lines changed: 2 additions & 2 deletions
@@ -24,10 +24,10 @@
 
     w = WorkspaceClient()
 
-    created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+    created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
     # cleanup
-    w.catalogs.delete(name=created_catalog.name, force=True)
+    w.catalogs.delete(name=created.name, force=True)
 
 Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
 **CREATE_CATALOG** privilege.

docs/workspace/catalog/external_locations.rst

Lines changed: 15 additions & 12 deletions
@@ -30,20 +30,22 @@
 
     w = WorkspaceClient()
 
-    credential = w.storage_credentials.create(
+    storage_credential = w.storage_credentials.create(
         name=f"sdk-{time.time_ns()}",
         aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        comment="created via SDK",
     )
 
-    created = w.external_locations.create(
+    external_location = w.external_locations.create(
         name=f"sdk-{time.time_ns()}",
-        credential_name=credential.name,
-        url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+        credential_name=storage_credential.name,
+        comment="created via SDK",
+        url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
     )
 
     # cleanup
-    w.storage_credentials.delete(name=credential.name)
-    w.external_locations.delete(name=created.name)
+    w.storage_credentials.delete(name=storage_credential.name)
+    w.external_locations.delete(name=external_location.name)
 
 Creates a new external location entry in the metastore. The caller must be a metastore admin or have
 the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage

@@ -105,20 +107,20 @@
 
     credential = w.storage_credentials.create(
         name=f"sdk-{time.time_ns()}",
-        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+        aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
     )
 
     created = w.external_locations.create(
         name=f"sdk-{time.time_ns()}",
         credential_name=credential.name,
-        url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+        url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
    )
 
-    _ = w.external_locations.get(name=created.name)
+    _ = w.external_locations.get(get=created.name)
 
     # cleanup
-    w.storage_credentials.delete(name=credential.name)
-    w.external_locations.delete(name=created.name)
+    w.storage_credentials.delete(delete=credential.name)
+    w.external_locations.delete(delete=created.name)
 
 Gets an external location from the metastore. The caller must be either a metastore admin, the owner
 of the external location, or a user that has some privilege on the external location.

@@ -140,10 +142,11 @@
 .. code-block::
 
     from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import catalog
 
     w = WorkspaceClient()
 
-    all = w.external_locations.list()
+    all = w.external_locations.list(catalog.ListExternalLocationsRequest())
 
 Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
 must be a metastore admin, the owner of the external location, or a user that has some privilege on
