diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index b7f56b6d8..19009050a 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-e2018bb00cba203508f8afe5a6d41bd49789ba25
\ No newline at end of file
+59c4c0f3d5f0ef00cd5350b5674e941a7606d91a
\ No newline at end of file
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index e2c01b386..f60e0a370 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -14,3 +14,4 @@
 ### Internal Changes
 
 ### API Changes
+* Add `GOOGLE_ADS`, `TIKTOK_ADS`, `SALESFORCE_MARKETING_CLOUD`, `HUBSPOT`, `WORKDAY_HCM`, `GUIDEWIRE` and `ZENDESK` enum values for `databricks.sdk.service.pipelines.IngestionSourceType`.
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 28b9e5c64..5842aef8c 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -6036,8 +6036,11 @@ class Source(Enum):
 @dataclass
 class SparkJarTask:
     jar_uri: Optional[str] = None
-    """Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example,
-    see :method:jobs/create."""
+    """Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field
+    instead. For serverless compute, provide a `jar` through the `java_dependencies` field inside the
+    `environments` list.
+
+    See the examples of classic and serverless compute usage at the top of the page."""
 
     main_class_name: Optional[str] = None
     """The full name of the class containing the main method to be executed. This class must be
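Note on the `SparkJarTask` docstring above: it distinguishes a classic-compute path (JAR attached via `libraries`) from a serverless path (JAR supplied via `java_dependencies` in an environment spec). A minimal sketch of the serverless pattern it describes — the job name, volume path, and `environment_version` value are illustrative assumptions, and it presumes an SDK build where `compute.Environment` carries the `java_dependencies` field referenced in the docstring:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute, jobs

    w = WorkspaceClient()

    # Serverless: the task references an environment whose spec carries the JAR
    # through `java_dependencies`; classic compute would instead attach the JAR
    # through `libraries` on a task with a cluster.
    created = w.jobs.create(
        name="jar-on-serverless",  # hypothetical job name
        environments=[
            jobs.JobEnvironment(
                environment_key="default",
                spec=compute.Environment(
                    environment_version="2",  # assumed; use a version your workspace supports
                    java_dependencies=["/Volumes/main/default/libs/app.jar"],  # hypothetical path
                ),
            )
        ],
        tasks=[
            jobs.Task(
                task_key="main",
                environment_key="default",
                spark_jar_task=jobs.SparkJarTask(main_class_name="com.example.Main"),
            )
        ],
    )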
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index cea28bc53..458c4b388 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -832,6 +832,9 @@ class IngestionSourceType(Enum):
     DYNAMICS365 = "DYNAMICS365"
     FOREIGN_CATALOG = "FOREIGN_CATALOG"
     GA4_RAW_DATA = "GA4_RAW_DATA"
+    GOOGLE_ADS = "GOOGLE_ADS"
+    GUIDEWIRE = "GUIDEWIRE"
+    HUBSPOT = "HUBSPOT"
     MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
     META_MARKETING = "META_MARKETING"
     MYSQL = "MYSQL"
@@ -840,12 +843,16 @@
     POSTGRESQL = "POSTGRESQL"
     REDSHIFT = "REDSHIFT"
     SALESFORCE = "SALESFORCE"
+    SALESFORCE_MARKETING_CLOUD = "SALESFORCE_MARKETING_CLOUD"
     SERVICENOW = "SERVICENOW"
     SHAREPOINT = "SHAREPOINT"
     SQLDW = "SQLDW"
     SQLSERVER = "SQLSERVER"
     TERADATA = "TERADATA"
+    TIKTOK_ADS = "TIKTOK_ADS"
+    WORKDAY_HCM = "WORKDAY_HCM"
     WORKDAY_RAAS = "WORKDAY_RAAS"
+    ZENDESK = "ZENDESK"
 
 
 @dataclass
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 2a8043172..133b16f3d 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@
 
         a = AccountClient()
 
-        workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
+        workspace_id = os.environ["TEST_WORKSPACE_ID"]
 
         all = a.workspace_assignment.list(workspace_id=workspace_id)
 
    Get the permission assignments for the specified Databricks account and Databricks workspace.
 
@@ -74,9 +74,9 @@
 
         spn_id = spn.id
 
-        workspace_id = os.environ["TEST_WORKSPACE_ID"]
+        workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-        a.workspace_assignment.update(
+        _ = a.workspace_assignment.update(
             workspace_id=workspace_id,
             principal_id=spn_id,
             permissions=[iam.WorkspacePermission.USER],
diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst
index b71c1707e..d63648d58 100644
--- a/docs/account/provisioning/credentials.rst
+++ b/docs/account/provisioning/credentials.rst
@@ -24,15 +24,15 @@
 
         a = AccountClient()
 
-        creds = a.credentials.create(
+        role = a.credentials.create(
             credentials_name=f"sdk-{time.time_ns()}",
             aws_credentials=provisioning.CreateCredentialAwsCredentials(
-                sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+                sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
             ),
         )
 
         # cleanup
-        a.credentials.delete(credentials_id=creds.credentials_id)
+        a.credentials.delete(credentials_id=role.credentials_id)
 
    Creates a Databricks credential configuration that represents cloud cross-account credentials for a
    specified account. Databricks uses this to set up network infrastructure properly to host Databricks
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index 41a04deb3..b9f080e36 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -16,6 +16,7 @@
 
    .. code-block::
 
+        import os
         import time
 
         from databricks.sdk import AccountClient
@@ -23,13 +24,13 @@
 
         a = AccountClient()
 
-        bucket = a.storage.create(
+        storage = a.storage.create(
             storage_configuration_name=f"sdk-{time.time_ns()}",
-            root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+            root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
         )
 
         # cleanup
-        a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
+        a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 
    Creates a Databricks storage configuration for an account.
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index aaf5b4b23..cc2f14411 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -151,6 +151,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: GA4_RAW_DATA
       :value: "GA4_RAW_DATA"
 
+   .. py:attribute:: GOOGLE_ADS
+      :value: "GOOGLE_ADS"
+
+   .. py:attribute:: GUIDEWIRE
+      :value: "GUIDEWIRE"
+
+   .. py:attribute:: HUBSPOT
+      :value: "HUBSPOT"
+
    .. py:attribute:: MANAGED_POSTGRESQL
       :value: "MANAGED_POSTGRESQL"
 
@@ -175,6 +184,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SALESFORCE
       :value: "SALESFORCE"
 
+   .. py:attribute:: SALESFORCE_MARKETING_CLOUD
+      :value: "SALESFORCE_MARKETING_CLOUD"
+
    .. py:attribute:: SERVICENOW
       :value: "SERVICENOW"
 
@@ -190,9 +202,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: TERADATA
       :value: "TERADATA"
 
+   .. py:attribute:: TIKTOK_ADS
+      :value: "TIKTOK_ADS"
+
+   .. py:attribute:: WORKDAY_HCM
+      :value: "WORKDAY_HCM"
+
    .. py:attribute:: WORKDAY_RAAS
       :value: "WORKDAY_RAAS"
 
+   .. py:attribute:: ZENDESK
+      :value: "ZENDESK"
+
 .. autoclass:: ListPipelineEventsResponse
    :members:
    :undoc-members:
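A quick sanity check for the seven connector types this change adds to `IngestionSourceType` (runnable against an SDK build that includes the diff above):

    from databricks.sdk.service.pipelines import IngestionSourceType

    # Enum members added by this change; .value mirrors the wire format.
    new_sources = [
        IngestionSourceType.GOOGLE_ADS,
        IngestionSourceType.TIKTOK_ADS,
        IngestionSourceType.SALESFORCE_MARKETING_CLOUD,
        IngestionSourceType.HUBSPOT,
        IngestionSourceType.WORKDAY_HCM,
        IngestionSourceType.GUIDEWIRE,
        IngestionSourceType.ZENDESK,
    ]
    for source in new_sources:
        # Round-trip: looking an enum up by its wire value yields the same member.
        assert IngestionSourceType(source.value) is source
        print(source.name)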
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 17297d8dd..77de87dc4 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -24,10 +24,10 @@
 
         w = WorkspaceClient()
 
-        created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+        created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
         # cleanup
-        w.catalogs.delete(name=created_catalog.name, force=True)
+        w.catalogs.delete(name=created.name, force=True)
 
    Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
    **CREATE_CATALOG** privilege.
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 612800956..df5bdb1b6 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -30,20 +30,22 @@
 
         w = WorkspaceClient()
 
-        credential = w.storage_credentials.create(
+        storage_credential = w.storage_credentials.create(
             name=f"sdk-{time.time_ns()}",
             aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+            comment="created via SDK",
         )
 
-        created = w.external_locations.create(
+        external_location = w.external_locations.create(
             name=f"sdk-{time.time_ns()}",
-            credential_name=credential.name,
-            url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+            credential_name=storage_credential.name,
+            comment="created via SDK",
+            url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
         )
 
         # cleanup
-        w.storage_credentials.delete(name=credential.name)
-        w.external_locations.delete(name=created.name)
+        w.storage_credentials.delete(name=storage_credential.name)
+        w.external_locations.delete(name=external_location.name)
 
    Creates a new external location entry in the metastore. The caller must be a metastore admin or
    have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
@@ -105,20 +107,20 @@
 
         credential = w.storage_credentials.create(
             name=f"sdk-{time.time_ns()}",
-            aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+            aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
         )
 
         created = w.external_locations.create(
             name=f"sdk-{time.time_ns()}",
             credential_name=credential.name,
-            url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+            url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
         )
 
         _ = w.external_locations.get(name=created.name)
 
         # cleanup
         w.storage_credentials.delete(name=credential.name)
         w.external_locations.delete(name=created.name)
 
    Gets an external location from the metastore. The caller must be either a metastore admin, the
    owner of the external location, or a user that has some privilege on the external location.
@@ -140,10 +142,11 @@
 
    .. code-block::
 
        from databricks.sdk import WorkspaceClient
+       from databricks.sdk.service import catalog
 
        w = WorkspaceClient()
 
-       all = w.external_locations.list()
+       all = w.external_locations.list(catalog.ListExternalLocationsRequest())
 
    Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
    The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index d8111141e..ad6e4ebe5 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -30,13 +30,13 @@
 
         w = WorkspaceClient()
 
-        credential = w.storage_credentials.create(
+        created = w.storage_credentials.create(
             name=f"sdk-{time.time_ns()}",
-            aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+            aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
         )
 
         # cleanup
-        w.storage_credentials.delete(name=credential.name)
+        w.storage_credentials.delete(name=created.name)
 
    Creates a new storage credential.
@@ -98,13 +98,13 @@
 
         created = w.storage_credentials.create(
             name=f"sdk-{time.time_ns()}",
-            aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+            aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
         )
 
-        by_name = w.storage_credentials.get(get=created.name)
+        by_name = w.storage_credentials.get(name=created.name)
 
         # cleanup
-        w.storage_credentials.delete(delete=created.name)
+        w.storage_credentials.delete(name=created.name)
 
    Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
    storage credential, or have some permission on the storage credential.
@@ -123,11 +123,10 @@
 
    .. code-block::
 
        from databricks.sdk import WorkspaceClient
-       from databricks.sdk.service import catalog
 
        w = WorkspaceClient()
 
-       all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest())
+       all = w.storage_credentials.list()
 
    Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited
    to only those storage credentials the caller has permission to access. If the caller is a metastore
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index db78626ff..d46b8ecd0 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -647,10 +647,11 @@
 
    .. code-block::
 
        from databricks.sdk import WorkspaceClient
+       from databricks.sdk.service import compute
 
        w = WorkspaceClient()
 
-       nodes = w.clusters.list_node_types()
+       all = w.clusters.list(compute.ListClustersRequest())
 
    Return information about all pinned and active clusters, and all clusters terminated within the last
    30 days. Clusters terminated prior to this period are not included.
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst
index b2390ce63..2f95213e2 100644
--- a/docs/workspace/iam/current_user.rst
+++ b/docs/workspace/iam/current_user.rst
@@ -17,7 +17,7 @@
 
         w = WorkspaceClient()
 
-        me = w.current_user.me()
+        me2 = w.current_user.me()
 
    Get details about the current method caller's identity.
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index ea24afd1a..15524c53e 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@
 
         obj = w.workspace.get_status(path=notebook_path)
 
-        _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+        levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
 
    Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
    object.
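Note on the permissions.rst hunk above: `get` and `get_permission_levels` take the same identifiers but answer different questions — `get` returns the object's current access-control list, while `get_permission_levels` returns the levels that can be granted on that object type. A side-by-side sketch (the notebook path is a hypothetical stand-in):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    obj = w.workspace.get_status(path="/Users/someone@example.com/my-notebook")  # hypothetical path

    # Current access-control list on this notebook.
    acl = w.permissions.get(request_object_type="notebooks", request_object_id=str(obj.object_id))

    # Permission levels that can be granted on notebooks.
    levels = w.permissions.get_permission_levels(
        request_object_type="notebooks", request_object_id=str(obj.object_id)
    )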
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 0b82986de..39beecc1b 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -357,23 +357,21 @@
             w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
         )
 
-        created_job = w.jobs.create(
-            name=f"sdk-{time.time_ns()}",
+        run = w.jobs.submit(
+            run_name=f"sdk-{time.time_ns()}",
             tasks=[
-                jobs.Task(
-                    description="test",
+                jobs.SubmitTask(
                     existing_cluster_id=cluster_id,
                     notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                    task_key="test",
-                    timeout_seconds=0,
+                    task_key=f"sdk-{time.time_ns()}",
                 )
             ],
-        )
+        ).result()
 
-        by_id = w.jobs.get(job_id=created_job.job_id)
+        output = w.jobs.get_run_output(run_id=run.tasks[0].run_id)
 
         # cleanup
-        w.jobs.delete(job_id=created_job.job_id)
+        w.jobs.delete_run(run_id=run.run_id)
 
    Get a single job.
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index 98d803a63..2d34256e4 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -90,7 +90,7 @@
 
         w = WorkspaceClient()
 
-        created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+        model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
    Creates a new registered model with the name specified in the request body. Throws
    `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
@@ -120,7 +120,7 @@
 
         model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
-        created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+        mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
    Creates a model version.
@@ -734,13 +734,14 @@
 
         w = WorkspaceClient()
 
-        created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+        model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
-        model = w.model_registry.get_model(name=created.registered_model.name)
+        created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
-        w.model_registry.update_model(
-            name=model.registered_model_databricks.name,
+        w.model_registry.update_model_version(
             description=f"sdk-{time.time_ns()}",
+            name=created.model_version.name,
+            version=created.model_version.version,
         )
 
    Updates a registered model.
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 1a7c88de9..fd81e1b24 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -101,25 +101,12 @@
 
    .. code-block::
 
-       import time
-
        from databricks.sdk import WorkspaceClient
+       from databricks.sdk.service import sharing
 
        w = WorkspaceClient()
 
-       public_share_recipient = """{
-           "shareCredentialsVersion":1,
-           "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz",
-           "endpoint":"https://sharing.delta.io/delta-sharing/"
-       }
-       """
-
-       created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
-
-       shares = w.providers.list_shares(name=created.name)
-
-       # cleanup
-       w.providers.delete(name=created.name)
+       all = w.providers.list(sharing.ListProvidersRequest())
 
    Gets an array of available authentication providers. The caller must either be a metastore admin or
    the owner of the providers. Providers not owned by the caller are not included in the response. There
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index 0dfb63fbf..f0081b3f2 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -29,7 +29,7 @@
                 display_name=f"sdk-{time.time_ns()}",
                 warehouse_id=srcs[0].warehouse_id,
                 description="test query from Go SDK",
-                query_text="SELECT 1",
+                query_text="SHOW TABLES",
             )
         )
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index e1b7d12b9..02f5e5931 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -178,7 +178,7 @@
             content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
             format=workspace.ImportFormat.SOURCE,
             language=workspace.Language.SQL,
-            overwrite=true_,
+            overwrite=True,
             path=notebook_path,
         )
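For context on the workspace.rst fix above (`true_` was an undefined name; `True` is the Python literal), the corrected import call reads roughly as follows — these docs invoke it as `w.workspace.import_`, and the notebook path here is a hypothetical stand-in:

    import base64

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import workspace

    w = WorkspaceClient()
    notebook_path = "/Users/someone@example.com/dlt_sample"  # hypothetical path

    w.workspace.import_(
        content=base64.b64encode(b"CREATE LIVE TABLE dlt_sample AS SELECT 1").decode(),
        format=workspace.ImportFormat.SOURCE,
        language=workspace.Language.SQL,
        overwrite=True,  # previously the undefined name `true_`
        path=notebook_path,
    )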