diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 8622b29ca..dfe78790a 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d \ No newline at end of file +779817ed8d63031f5ea761fbd25ee84f38feec0d \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 911142118..981503c32 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1521,7 +1521,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServic databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DataPlaneInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java linguist-generated=true @@ -1574,6 +1573,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePip databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java linguist-generated=true @@ -1628,7 +1628,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Pipelines databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true @@ -1751,6 +1751,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessageRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 634a9d1ee..9c42cecd4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,33 @@ # Version changelog +## [Release] Release v0.39.0 + +### Internal Changes + + * Migrate workflows that need write access to use hosted runners ([#397](https://github.com/databricks/databricks-sdk-java/pull/397)). + + +### API Changes: + + * Added `noCompute` field for `com.databricks.sdk.service.apps.CreateAppRequest`. + * Added `hasMore` field for `com.databricks.sdk.service.jobs.BaseJob`. + * Added `hasMore` field for `com.databricks.sdk.service.jobs.BaseRun`. + * Added `pageToken` field for `com.databricks.sdk.service.jobs.GetJobRequest`. + * Added `hasMore` and `nextPageToken` fields for `com.databricks.sdk.service.jobs.Job`. + * Added `hasMore` field for `com.databricks.sdk.service.jobs.Run`. + * Added `runAs` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. + * Added `runAs` field for `com.databricks.sdk.service.pipelines.EditPipeline`. + * Added `authorizationDetails` and `endpointUrl` fields for `com.databricks.sdk.service.serving.DataPlaneInfo`. + * Added `com.databricks.sdk.service.pipelines.DayOfWeek` and `com.databricks.sdk.service.pipelines.RunAs` classes. + * Changed `update()` method for `accountClient.federationPolicy()` service with new required argument order. + * Changed `update()` method for `accountClient.servicePrincipalFederationPolicy()` service with new required argument order. + * Changed `updateMask` field for `com.databricks.sdk.service.oauth2.UpdateAccountFederationPolicyRequest` to no longer be required. + * Changed `updateMask` field for `com.databricks.sdk.service.oauth2.UpdateServicePrincipalFederationPolicyRequest` to no longer be required. 
+ * Changed `daysOfWeek` field for `com.databricks.sdk.service.pipelines.RestartWindow` to type `com.databricks.sdk.service.pipelines.DayOfWeekList` class. + +OpenAPI SHA: 779817ed8d63031f5ea761fbd25ee84f38feec0d, Date: 2025-01-08 + + ## [Release] Release v0.38.0 ### API Changes: diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index 0fd6a5edf..0007415b3 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.38.0 + 0.39.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java index 7d1076bb7..0a2d2eb59 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java @@ -3,7 +3,9 @@ package com.databricks.sdk.service.apps; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @@ -14,6 +16,11 @@ public class CreateAppRequest { @JsonProperty("app") private App app; + /** If true, the app will not be started after creation. 
*/ + @JsonIgnore + @QueryParam("no_compute") + private Boolean noCompute; + public CreateAppRequest setApp(App app) { this.app = app; return this; @@ -23,21 +30,33 @@ public App getApp() { return app; } + public CreateAppRequest setNoCompute(Boolean noCompute) { + this.noCompute = noCompute; + return this; + } + + public Boolean getNoCompute() { + return noCompute; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppRequest that = (CreateAppRequest) o; - return Objects.equals(app, that.app); + return Objects.equals(app, that.app) && Objects.equals(noCompute, that.noCompute); } @Override public int hashCode() { - return Objects.hash(app); + return Objects.hash(app, noCompute); } @Override public String toString() { - return new ToStringer(CreateAppRequest.class).add("app", app).toString(); + return new ToStringer(CreateAppRequest.class) + .add("app", app) + .add("noCompute", noCompute) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java index c68ebfa80..b697378c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java @@ -7,6 +7,7 @@ @Generated public enum ProvisioningInfoState { ACTIVE, + DEGRADED, DELETING, FAILED, PROVISIONING, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index 4f335aaa0..9fc3a5d45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -31,6 +31,14 @@ public class BaseJob { 
@JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** + * Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + * can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 + * :method:jobs/list requests with `expand_tasks=true`. + */ + @JsonProperty("has_more") + private Boolean hasMore; + /** The canonical identifier for this job. */ @JsonProperty("job_id") private Long jobId; @@ -69,6 +77,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public BaseJob setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + public BaseJob setJobId(Long jobId) { this.jobId = jobId; return this; @@ -95,13 +112,15 @@ public boolean equals(Object o) { return Objects.equals(createdTime, that.createdTime) && Objects.equals(creatorUserName, that.creatorUserName) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobId, that.jobId) && Objects.equals(settings, that.settings); } @Override public int hashCode() { - return Objects.hash(createdTime, creatorUserName, effectiveBudgetPolicyId, jobId, settings); + return Objects.hash( + createdTime, creatorUserName, effectiveBudgetPolicyId, hasMore, jobId, settings); } @Override @@ -110,6 +129,7 @@ public String toString() { .add("createdTime", createdTime) .add("creatorUserName", creatorUserName) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("hasMore", hasMore) .add("jobId", jobId) .add("settings", settings) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index 14ff9aded..932a1f363 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -82,10 +82,19 @@ public class BaseRun { @JsonProperty("git_source") private GitSource gitSource; + /** + * Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + * can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + * :method:jobs/listruns requests with `expand_tasks=true`. + */ + @JsonProperty("has_more") + private Boolean hasMore; + /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. + * task settings. If more than 100 job clusters are available, you can paginate through them using + * :method:jobs/getrun. */ @JsonProperty("job_clusters") private Collection jobClusters; @@ -188,7 +197,9 @@ public class BaseRun { /** * The list of tasks performed by the run. Each task has its own `run_id` which you can use to - * call `JobsGetOutput` to retrieve the run resutls. + * call `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can + * paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object + * root to determine if more results are available. 
*/ @JsonProperty("tasks") private Collection tasks; @@ -293,6 +304,15 @@ public GitSource getGitSource() { return gitSource; } + public BaseRun setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + public BaseRun setJobClusters(Collection jobClusters) { this.jobClusters = jobClusters; return this; @@ -505,6 +525,7 @@ public boolean equals(Object o) { && Objects.equals(endTime, that.endTime) && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobClusters, that.jobClusters) && Objects.equals(jobId, that.jobId) && Objects.equals(jobParameters, that.jobParameters) @@ -541,6 +562,7 @@ public int hashCode() { endTime, executionDuration, gitSource, + hasMore, jobClusters, jobId, jobParameters, @@ -577,6 +599,7 @@ public String toString() { .add("endTime", endTime) .add("executionDuration", executionDuration) .add("gitSource", gitSource) + .add("hasMore", hasMore) .add("jobClusters", jobClusters) .add("jobId", jobId) .add("jobParameters", jobParameters) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index c065968f0..d1f4f2c36 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -94,7 +94,8 @@ public class CreateJob { /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. + * task settings. If more than 100 job clusters are available, you can paginate through them using + * :method:jobs/get. 
*/ @JsonProperty("job_clusters") private Collection jobClusters; @@ -156,7 +157,11 @@ public class CreateJob { @JsonProperty("tags") private Map tags; - /** A list of task specifications to be executed by this job. */ + /** + * A list of task specifications to be executed by this job. If more than 100 tasks are available, + * you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the + * object root to determine if more results are available. + */ @JsonProperty("tasks") private Collection tasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java index 5ac0f1105..74fc2f572 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetJobRequest.java @@ -16,6 +16,14 @@ public class GetJobRequest { @QueryParam("job_id") private Long jobId; + /** + * Use `next_page_token` returned from the previous GetJob to request the next page of the job's + * sub-resources. 
+ */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + public GetJobRequest setJobId(Long jobId) { this.jobId = jobId; return this; @@ -25,21 +33,33 @@ public Long getJobId() { return jobId; } + public GetJobRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetJobRequest that = (GetJobRequest) o; - return Objects.equals(jobId, that.jobId); + return Objects.equals(jobId, that.jobId) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(jobId); + return Objects.hash(jobId, pageToken); } @Override public String toString() { - return new ToStringer(GetJobRequest.class).add("jobId", jobId).toString(); + return new ToStringer(GetJobRequest.class) + .add("jobId", jobId) + .add("pageToken", pageToken) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java index bf870d3f6..9b2c2e9e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java @@ -22,8 +22,8 @@ public class GetRunRequest { private Boolean includeResolvedValues; /** - * To list the next page of job tasks, set this field to the value of the `next_page_token` - * returned in the GetJob response. + * Use `next_page_token` returned from the previous GetRun to request the next page of the run's + * sub-resources. 
*/ @JsonIgnore @QueryParam("page_token") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index 6d6342874..f996f662b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -32,10 +32,22 @@ public class Job { @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** + * Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + * can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 + * :method:jobs/list requests with `expand_tasks=true`. + */ + @JsonProperty("has_more") + private Boolean hasMore; + /** The canonical identifier for this job. */ @JsonProperty("job_id") private Long jobId; + /** A token that can be used to list the next page of sub-resources. */ + @JsonProperty("next_page_token") + private String nextPageToken; + /** * The email of an active workspace user or the application ID of a service principal that the job * runs as. 
This value can be changed by setting the `run_as` field when creating or updating a @@ -82,6 +94,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public Job setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + public Job setJobId(Long jobId) { this.jobId = jobId; return this; @@ -91,6 +112,15 @@ public Long getJobId() { return jobId; } + public Job setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + public Job setRunAsUserName(String runAsUserName) { this.runAsUserName = runAsUserName; return this; @@ -117,7 +147,9 @@ public boolean equals(Object o) { return Objects.equals(createdTime, that.createdTime) && Objects.equals(creatorUserName, that.creatorUserName) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobId, that.jobId) + && Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(runAsUserName, that.runAsUserName) && Objects.equals(settings, that.settings); } @@ -125,7 +157,14 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - createdTime, creatorUserName, effectiveBudgetPolicyId, jobId, runAsUserName, settings); + createdTime, + creatorUserName, + effectiveBudgetPolicyId, + hasMore, + jobId, + nextPageToken, + runAsUserName, + settings); } @Override @@ -134,7 +173,9 @@ public String toString() { .add("createdTime", createdTime) .add("creatorUserName", creatorUserName) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("hasMore", hasMore) .add("jobId", jobId) + .add("nextPageToken", nextPageToken) .add("runAsUserName", runAsUserName) .add("settings", settings) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index 6a593805c..71ded0884 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -90,7 +90,8 @@ public class JobSettings { /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. + * task settings. If more than 100 job clusters are available, you can paginate through them using + * :method:jobs/get. */ @JsonProperty("job_clusters") private Collection jobClusters; @@ -152,7 +153,11 @@ public class JobSettings { @JsonProperty("tags") private Map tags; - /** A list of task specifications to be executed by this job. */ + /** + * A list of task specifications to be executed by this job. If more than 100 tasks are available, + * you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the + * object root to determine if more results are available. + */ @JsonProperty("tasks") private Collection tasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java index 5b0ce638a..5542bb665 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java @@ -182,6 +182,11 @@ public Job get(long jobId) { * Get a single job. * *

Retrieves the details for a single job. + * + *

In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` + * when either exceeds 100 elements. Use the `next_page_token` field to check for more results and + * pass its value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements + * in a page will be empty on later pages. */ public Job get(GetJobRequest request) { return impl.get(request); @@ -220,7 +225,12 @@ public Run getRun(long runId) { /** * Get a single job run. * - *

Retrieve the metadata of a run. + *

Retrieves the metadata of a run. + * + *

In Jobs API 2.2, requests for a single job run support pagination of `tasks` and + * `job_clusters` when either exceeds 100 elements. Use the `next_page_token` field to check for + * more results and pass its value as the `page_token` in subsequent requests. Arrays with fewer + * than 100 elements in a page will be empty on later pages. */ public Run getRun(GetRunRequest request) { return impl.getRun(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java index 46696459b..2b8b9ee9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java @@ -76,6 +76,11 @@ public interface JobsService { * Get a single job. * *

Retrieves the details for a single job. + * + *

In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` + * when either exceeds 100 elements. Use the `next_page_token` field to check for more results and + * pass its value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements + * in a page will be empty on later pages. */ Job get(GetJobRequest getJobRequest); @@ -97,7 +102,12 @@ GetJobPermissionLevelsResponse getPermissionLevels( /** * Get a single job run. * - *

Retrieve the metadata of a run. + *

Retrieves the metadata of a run. + * + *

In Jobs API 2.2, requests for a single job run support pagination of `tasks` and + * `job_clusters` when either exceeds 100 elements. Use the `next_page_token` field to check for + * more results and pass its value as the `page_token` in subsequent requests. Arrays with fewer + * than 100 elements in a page will be empty on later pages. */ Run getRun(GetRunRequest getRunRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java index 0de7c7253..b49e9330e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java @@ -11,7 +11,11 @@ /** List jobs */ @Generated public class ListJobsRequest { - /** Whether to include task and cluster details in the response. */ + /** + * Whether to include task and cluster details in the response. Note that in API 2.2, only the + * first 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and + * clusters. + */ @JsonIgnore @QueryParam("expand_tasks") private Boolean expandTasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java index 62fd55fb5..19b36509b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java @@ -28,7 +28,11 @@ public class ListRunsRequest { @QueryParam("completed_only") private Boolean completedOnly; - /** Whether to include task and cluster details in the response. */ + /** + * Whether to include task and cluster details in the response. Note that in API 2.2, only the + * first 100 elements will be shown. 
Use :method:jobs/getrun to paginate through all tasks and + * clusters. + */ @JsonIgnore @QueryParam("expand_tasks") private Boolean expandTasks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index d5518d321..bee02f004 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -83,6 +83,14 @@ public class Run { @JsonProperty("git_source") private GitSource gitSource; + /** + * Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They + * can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 + * :method:jobs/listruns requests with `expand_tasks=true`. + */ + @JsonProperty("has_more") + private Boolean hasMore; + /** Only populated by for-each iterations. The parent for-each task is located in tasks array. */ @JsonProperty("iterations") private Collection iterations; @@ -90,7 +98,8 @@ public class Run { /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - * task settings. + * task settings. If more than 100 job clusters are available, you can paginate through them using + * :method:jobs/getrun. */ @JsonProperty("job_clusters") private Collection jobClusters; @@ -197,7 +206,9 @@ public class Run { /** * The list of tasks performed by the run. Each task has its own `run_id` which you can use to - * call `JobsGetOutput` to retrieve the run resutls. + * call `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can + * paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object + * root to determine if more results are available. 
*/ @JsonProperty("tasks") private Collection tasks; @@ -302,6 +313,15 @@ public GitSource getGitSource() { return gitSource; } + public Run setHasMore(Boolean hasMore) { + this.hasMore = hasMore; + return this; + } + + public Boolean getHasMore() { + return hasMore; + } + public Run setIterations(Collection iterations) { this.iterations = iterations; return this; @@ -532,6 +552,7 @@ public boolean equals(Object o) { && Objects.equals(endTime, that.endTime) && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(hasMore, that.hasMore) && Objects.equals(iterations, that.iterations) && Objects.equals(jobClusters, that.jobClusters) && Objects.equals(jobId, that.jobId) @@ -570,6 +591,7 @@ public int hashCode() { endTime, executionDuration, gitSource, + hasMore, iterations, jobClusters, jobId, @@ -608,6 +630,7 @@ public String toString() { .add("endTime", endTime) .add("executionDuration", executionDuration) .add("gitSource", gitSource) + .add("hasMore", hasMore) .add("iterations", iterations) .add("jobClusters", jobClusters) .add("jobId", jobId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java index 56185f68a..1c39c300c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java @@ -104,9 +104,8 @@ public Iterable list(ListAccountFederationPoliciesRequest requ }); } - public FederationPolicy update(String policyId, String updateMask) { - return update( - new UpdateAccountFederationPolicyRequest().setPolicyId(policyId).setUpdateMask(updateMask)); + public FederationPolicy update(String policyId) { + return update(new UpdateAccountFederationPolicyRequest().setPolicyId(policyId)); } 
/** Update account federation policy. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java index d7391eb14..f4641952e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateAccountFederationPolicyRequest.java @@ -17,8 +17,9 @@ public class CreateAccountFederationPolicyRequest { private FederationPolicy policy; /** - * The identifier for the federation policy. If unspecified, the id will be assigned by - * Databricks. + * The identifier for the federation policy. The identifier must contain only lowercase + * alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned + * by Databricks. */ @JsonIgnore @QueryParam("policy_id") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java index 7d1e1b78f..517a5f08e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateServicePrincipalFederationPolicyRequest.java @@ -17,8 +17,9 @@ public class CreateServicePrincipalFederationPolicyRequest { private FederationPolicy policy; /** - * The identifier for the federation policy. If unspecified, the id will be assigned by - * Databricks. + * The identifier for the federation policy. The identifier must contain only lowercase + * alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned + * by Databricks. 
*/ @JsonIgnore @QueryParam("policy_id") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java index 42e92132a..ab0bb9b5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java @@ -10,7 +10,7 @@ /** Delete account federation policy */ @Generated public class DeleteAccountFederationPolicyRequest { - /** */ + /** The identifier for the federation policy. */ @JsonIgnore private String policyId; public DeleteAccountFederationPolicyRequest setPolicyId(String policyId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java index c72cba6de..63b5dedae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java @@ -10,7 +10,7 @@ /** Delete service principal federation policy */ @Generated public class DeleteServicePrincipalFederationPolicyRequest { - /** */ + /** The identifier for the federation policy. */ @JsonIgnore private String policyId; /** The service principal id for the federation policy. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java index feb093234..f8949b29f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java @@ -18,8 +18,13 @@ public class FederationPolicy { private String description; /** - * Name of the federation policy. The name must contain only lowercase alphanumeric characters, - * numbers, and hyphens. It must be unique within the account. + * Resource name for the federation policy. Example values include + * `accounts//federationPolicies/my-federation-policy` for Account Federation + * Policies, and + * `accounts//servicePrincipals//federationPolicies/my-federation-policy` + * for Service Principal Federation Policies. Typically an output parameter, which does not need + * to be specified in create or update requests. If specified in a request, must match the value + * in the request URL. */ @JsonProperty("name") private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java index dfe03d950..2637cf8f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetAccountFederationPolicyRequest.java @@ -10,7 +10,7 @@ /** Get account federation policy */ @Generated public class GetAccountFederationPolicyRequest { - /** */ + /** The identifier for the federation policy. 
*/ @JsonIgnore private String policyId; public GetAccountFederationPolicyRequest setPolicyId(String policyId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java index 0738ebdc3..a9e986b21 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetServicePrincipalFederationPolicyRequest.java @@ -10,7 +10,7 @@ /** Get service principal federation policy */ @Generated public class GetServicePrincipalFederationPolicyRequest { - /** */ + /** The identifier for the federation policy. */ @JsonIgnore private String policyId; /** The service principal id for the federation policy. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java index 470ad815f..24b8d3051 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java @@ -125,12 +125,11 @@ public Iterable list(ListServicePrincipalFederationPoliciesReq }); } - public FederationPolicy update(long servicePrincipalId, String policyId, String updateMask) { + public FederationPolicy update(long servicePrincipalId, String policyId) { return update( new UpdateServicePrincipalFederationPolicyRequest() .setServicePrincipalId(servicePrincipalId) - .setPolicyId(policyId) - .setUpdateMask(updateMask)); + .setPolicyId(policyId)); } /** Update service principal federation policy. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java index 9acea6094..c30aa6595 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java @@ -16,13 +16,15 @@ public class UpdateAccountFederationPolicyRequest { @JsonProperty("policy") private FederationPolicy policy; - /** */ + /** The identifier for the federation policy. */ @JsonIgnore private String policyId; /** - * Field mask is required to be passed into the PATCH request. Field mask specifies which fields - * of the setting payload will be updated. The field mask needs to be supplied as single string. - * To specify multiple fields in the field mask, use comma as the separator (no space). + * The field mask specifies which fields of the policy to update. To specify multiple fields in + * the field mask, use comma as the separator (no space). The special value '*' indicates that all + * fields should be updated (full replacement). If unspecified, all fields that are set in the + * policy provided in the update request will overwrite the corresponding fields in the existing + * policy. Example value: 'description,oidc_policy.audiences'. 
*/ @JsonIgnore @QueryParam("update_mask") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java index 8d95f0392..4f03417eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java @@ -16,16 +16,18 @@ public class UpdateServicePrincipalFederationPolicyRequest { @JsonProperty("policy") private FederationPolicy policy; - /** */ + /** The identifier for the federation policy. */ @JsonIgnore private String policyId; /** The service principal id for the federation policy. */ @JsonIgnore private Long servicePrincipalId; /** - * Field mask is required to be passed into the PATCH request. Field mask specifies which fields - * of the setting payload will be updated. The field mask needs to be supplied as single string. - * To specify multiple fields in the field mask, use comma as the separator (no space). + * The field mask specifies which fields of the policy to update. To specify multiple fields in + * the field mask, use comma as the separator (no space). The special value '*' indicates that all + * fields should be updated (full replacement). If unspecified, all fields that are set in the + * policy provided in the update request will overwrite the corresponding fields in the existing + * policy. Example value: 'description,oidc_policy.audiences'. 
*/ @JsonIgnore @QueryParam("update_mask") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 6a35d6632..5a6676c85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -99,6 +99,17 @@ public class CreatePipeline { @JsonProperty("restart_window") private RestartWindow restartWindow; + /** + * Write-only setting, available only in Create/Update calls. Specifies the user or service + * principal that the pipeline runs as. If not specified, the pipeline runs as the user who + * created the pipeline. + * + *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an + * error is thrown. + */ + @JsonProperty("run_as") + private RunAs runAs; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -306,6 +317,15 @@ public RestartWindow getRestartWindow() { return restartWindow; } + public CreatePipeline setRunAs(RunAs runAs) { + this.runAs = runAs; + return this; + } + + public RunAs getRunAs() { + return runAs; + } + public CreatePipeline setSchema(String schema) { this.schema = schema; return this; @@ -376,6 +396,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(runAs, that.runAs) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -406,6 +427,7 @@ public int hashCode() { notifications, photon, restartWindow, + runAs, schema, serverless, storage, @@ -436,6 +458,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("restartWindow", restartWindow) + .add("runAs", runAs) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java similarity index 92% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java index 37bf738a0..6bce1de65 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java @@ -9,7 +9,7 @@ * start_hour). If not specified all days of the week will be used. */ @Generated -public enum RestartWindowDaysOfWeek { +public enum DayOfWeek { FRIDAY, MONDAY, SATURDAY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 878e76bb3..6604d6864 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -108,6 +108,17 @@ public class EditPipeline { @JsonProperty("restart_window") private RestartWindow restartWindow; + /** + * Write-only setting, available only in Create/Update calls. Specifies the user or service + * principal that the pipeline runs as. If not specified, the pipeline runs as the user who + * created the pipeline. + * + *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an + * error is thrown. + */ + @JsonProperty("run_as") + private RunAs runAs; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -324,6 +335,15 @@ public RestartWindow getRestartWindow() { return restartWindow; } + public EditPipeline setRunAs(RunAs runAs) { + this.runAs = runAs; + return this; + } + + public RunAs getRunAs() { + return runAs; + } + public EditPipeline setSchema(String schema) { this.schema = schema; return this; @@ -395,6 +415,7 @@ public boolean equals(Object o) { && Objects.equals(photon, that.photon) && Objects.equals(pipelineId, that.pipelineId) && Objects.equals(restartWindow, that.restartWindow) + && Objects.equals(runAs, that.runAs) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -426,6 +447,7 @@ public int hashCode() { photon, pipelineId, restartWindow, + runAs, schema, serverless, storage, @@ -457,6 +479,7 @@ public String toString() { .add("photon", photon) .add("pipelineId", pipelineId) .add("restartWindow", restartWindow) + .add("runAs", runAs) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java index 6576bd13f..c4b6cc278 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java @@ -15,7 +15,7 @@ public class RestartWindow { * start_hour). If not specified all days of the week will be used. 
*/ @JsonProperty("days_of_week") - private Collection daysOfWeek; + private Collection daysOfWeek; /** * An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. @@ -32,12 +32,12 @@ public class RestartWindow { @JsonProperty("time_zone_id") private String timeZoneId; - public RestartWindow setDaysOfWeek(Collection daysOfWeek) { + public RestartWindow setDaysOfWeek(Collection daysOfWeek) { this.daysOfWeek = daysOfWeek; return this; } - public Collection getDaysOfWeek() { + public Collection getDaysOfWeek() { return daysOfWeek; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java new file mode 100755 index 000000000..0258b123e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Write-only setting, available only in Create/Update calls. Specifies the user or service + * principal that the pipeline runs as. If not specified, the pipeline runs as the user who created + * the pipeline. + * + *

Only `user_name` or `service_principal_name` can be specified. If both are specified, an error + * is thrown. + */ +@Generated +public class RunAs { + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ + @JsonProperty("service_principal_name") + private String servicePrincipalName; + + /** The email of an active workspace user. Users can only set this field to their own email. */ + @JsonProperty("user_name") + private String userName; + + public RunAs setServicePrincipalName(String servicePrincipalName) { + this.servicePrincipalName = servicePrincipalName; + return this; + } + + public String getServicePrincipalName() { + return servicePrincipalName; + } + + public RunAs setUserName(String userName) { + this.userName = userName; + return this; + } + + public String getUserName() { + return userName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RunAs that = (RunAs) o; + return Objects.equals(servicePrincipalName, that.servicePrincipalName) + && Objects.equals(userName, that.userName); + } + + @Override + public int hashCode() { + return Objects.hash(servicePrincipalName, userName); + } + + @Override + public String toString() { + return new ToStringer(RunAs.class) + .add("servicePrincipalName", servicePrincipalName) + .add("userName", userName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DataPlaneInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java similarity index 97% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DataPlaneInfo.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java index 0ac65af58..0d1f893f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DataPlaneInfo.java 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.oauth2; +package com.databricks.sdk.service.serving; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java index 206a6e798..8a77e0b2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java @@ -11,15 +11,14 @@ public class ModelDataPlaneInfo { /** Information required to query DataPlane API 'query' endpoint. */ @JsonProperty("query_info") - private com.databricks.sdk.service.oauth2.DataPlaneInfo queryInfo; + private DataPlaneInfo queryInfo; - public ModelDataPlaneInfo setQueryInfo( - com.databricks.sdk.service.oauth2.DataPlaneInfo queryInfo) { + public ModelDataPlaneInfo setQueryInfo(DataPlaneInfo queryInfo) { this.queryInfo = queryInfo; return this; } - public com.databricks.sdk.service.oauth2.DataPlaneInfo getQueryInfo() { + public DataPlaneInfo getQueryInfo() { return queryInfo; } diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index a9910fa2f..95c8c853f 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.38.0 + 0.39.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index a739a3b4c..e208276ea 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.38.0 + 0.39.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml 
index 0719dc332..48aec6126 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.38.0 + 0.39.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for diff --git a/shaded/pom.xml b/shaded/pom.xml index a5bfc9c7b..937162f84 100644 --- a/shaded/pom.xml +++ b/shaded/pom.xml @@ -4,7 +4,7 @@ 4.0.0 - 0.38.0 + 0.39.0 com.databricks