diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 2176b3c8c..1ff268bf4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -8,6 +8,8 @@
import com.databricks.sdk.core.utils.AzureUtils;
import com.databricks.sdk.service.billing.BillableUsageAPI;
import com.databricks.sdk.service.billing.BillableUsageService;
+import com.databricks.sdk.service.billing.BudgetPolicyAPI;
+import com.databricks.sdk.service.billing.BudgetPolicyService;
import com.databricks.sdk.service.billing.BudgetsAPI;
import com.databricks.sdk.service.billing.BudgetsService;
import com.databricks.sdk.service.billing.LogDeliveryAPI;
@@ -73,6 +75,7 @@ public class AccountClient {
private AccountAccessControlAPI accessControlAPI;
private BillableUsageAPI billableUsageAPI;
+ private BudgetPolicyAPI budgetPolicyAPI;
private CredentialsAPI credentialsAPI;
private CustomAppIntegrationAPI customAppIntegrationAPI;
private EncryptionKeysAPI encryptionKeysAPI;
@@ -110,6 +113,7 @@ public AccountClient(DatabricksConfig config) {
accessControlAPI = new AccountAccessControlAPI(apiClient);
billableUsageAPI = new BillableUsageAPI(apiClient);
+ budgetPolicyAPI = new BudgetPolicyAPI(apiClient);
credentialsAPI = new CredentialsAPI(apiClient);
customAppIntegrationAPI = new CustomAppIntegrationAPI(apiClient);
encryptionKeysAPI = new EncryptionKeysAPI(apiClient);
@@ -161,6 +165,11 @@ public BillableUsageAPI billableUsage() {
return billableUsageAPI;
}
+ /** A service that serves the REST API for budget policies. */
+ public BudgetPolicyAPI budgetPolicy() {
+ return budgetPolicyAPI;
+ }
+
/**
* These APIs manage credential configurations for this workspace. Databricks needs access to a
* cross-account service IAM role in your AWS account so that Databricks can deploy clusters in
@@ -581,6 +590,17 @@ public AccountClient withBillableUsageAPI(BillableUsageAPI billableUsage) {
return this;
}
+ /** Replace the default BudgetPolicyService with a custom implementation. */
+ public AccountClient withBudgetPolicyImpl(BudgetPolicyService budgetPolicy) {
+ return this.withBudgetPolicyAPI(new BudgetPolicyAPI(budgetPolicy));
+ }
+
+ /** Replace the default BudgetPolicyAPI with a custom implementation. */
+ public AccountClient withBudgetPolicyAPI(BudgetPolicyAPI budgetPolicy) {
+ this.budgetPolicyAPI = budgetPolicy;
+ return this;
+ }
+
/** Replace the default CredentialsService with a custom implementation. */
public AccountClient withCredentialsImpl(CredentialsService credentials) {
return this.withCredentialsAPI(new CredentialsAPI(credentials));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index ab237555b..12452d424 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -78,10 +78,16 @@
import com.databricks.sdk.service.dashboards.GenieAPI;
import com.databricks.sdk.service.dashboards.GenieService;
import com.databricks.sdk.service.dashboards.LakeviewAPI;
+import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI;
+import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService;
import com.databricks.sdk.service.dashboards.LakeviewService;
+import com.databricks.sdk.service.dashboards.QueryExecutionAPI;
+import com.databricks.sdk.service.dashboards.QueryExecutionService;
import com.databricks.sdk.service.files.DbfsService;
import com.databricks.sdk.service.files.FilesAPI;
import com.databricks.sdk.service.files.FilesService;
+import com.databricks.sdk.service.iam.AccessControlAPI;
+import com.databricks.sdk.service.iam.AccessControlService;
import com.databricks.sdk.service.iam.AccountAccessControlProxyAPI;
import com.databricks.sdk.service.iam.AccountAccessControlProxyService;
import com.databricks.sdk.service.iam.CurrentUserAPI;
@@ -176,6 +182,8 @@
import com.databricks.sdk.service.sql.QueryVisualizationsLegacyAPI;
import com.databricks.sdk.service.sql.QueryVisualizationsLegacyService;
import com.databricks.sdk.service.sql.QueryVisualizationsService;
+import com.databricks.sdk.service.sql.RedashConfigAPI;
+import com.databricks.sdk.service.sql.RedashConfigService;
import com.databricks.sdk.service.sql.StatementExecutionAPI;
import com.databricks.sdk.service.sql.StatementExecutionService;
import com.databricks.sdk.service.sql.WarehousesAPI;
@@ -199,6 +207,7 @@ public class WorkspaceClient {
private final ApiClient apiClient;
private final DatabricksConfig config;
+ private AccessControlAPI accessControlAPI;
private AccountAccessControlProxyAPI accountAccessControlProxyAPI;
private AlertsAPI alertsAPI;
private AlertsLegacyAPI alertsLegacyAPI;
@@ -239,6 +248,7 @@ public class WorkspaceClient {
private IpAccessListsAPI ipAccessListsAPI;
private JobsAPI jobsAPI;
private LakeviewAPI lakeviewAPI;
+ private LakeviewEmbeddedAPI lakeviewEmbeddedAPI;
private LibrariesAPI librariesAPI;
private MetastoresAPI metastoresAPI;
private ModelRegistryAPI modelRegistryAPI;
@@ -262,11 +272,13 @@ public class WorkspaceClient {
private QualityMonitorsAPI qualityMonitorsAPI;
private QueriesAPI queriesAPI;
private QueriesLegacyAPI queriesLegacyAPI;
+ private QueryExecutionAPI queryExecutionAPI;
private QueryHistoryAPI queryHistoryAPI;
private QueryVisualizationsAPI queryVisualizationsAPI;
private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI;
private RecipientActivationAPI recipientActivationAPI;
private RecipientsAPI recipientsAPI;
+ private RedashConfigAPI redashConfigAPI;
private RegisteredModelsAPI registeredModelsAPI;
private ReposAPI reposAPI;
private ResourceQuotasAPI resourceQuotasAPI;
@@ -301,6 +313,7 @@ public WorkspaceClient(DatabricksConfig config) {
this.config = config;
apiClient = new ApiClient(config);
+ accessControlAPI = new AccessControlAPI(apiClient);
accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient);
alertsAPI = new AlertsAPI(apiClient);
alertsLegacyAPI = new AlertsLegacyAPI(apiClient);
@@ -341,6 +354,7 @@ public WorkspaceClient(DatabricksConfig config) {
ipAccessListsAPI = new IpAccessListsAPI(apiClient);
jobsAPI = new JobsAPI(apiClient);
lakeviewAPI = new LakeviewAPI(apiClient);
+ lakeviewEmbeddedAPI = new LakeviewEmbeddedAPI(apiClient);
librariesAPI = new LibrariesAPI(apiClient);
metastoresAPI = new MetastoresAPI(apiClient);
modelRegistryAPI = new ModelRegistryAPI(apiClient);
@@ -364,11 +378,13 @@ public WorkspaceClient(DatabricksConfig config) {
qualityMonitorsAPI = new QualityMonitorsAPI(apiClient);
queriesAPI = new QueriesAPI(apiClient);
queriesLegacyAPI = new QueriesLegacyAPI(apiClient);
+ queryExecutionAPI = new QueryExecutionAPI(apiClient);
queryHistoryAPI = new QueryHistoryAPI(apiClient);
queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient);
queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient);
recipientActivationAPI = new RecipientActivationAPI(apiClient);
recipientsAPI = new RecipientsAPI(apiClient);
+ redashConfigAPI = new RedashConfigAPI(apiClient);
registeredModelsAPI = new RegisteredModelsAPI(apiClient);
reposAPI = new ReposAPI(apiClient);
resourceQuotasAPI = new ResourceQuotasAPI(apiClient);
@@ -407,6 +423,11 @@ public WorkspaceClient(boolean mock, ApiClient apiClient) {
this.config = null;
}
+ /** Rule-based access control for Databricks resources. */
+ public AccessControlAPI accessControl() {
+ return accessControlAPI;
+ }
+
/**
* These APIs manage access rules on resources in an account. Currently, only grant rules are
* supported. A grant rule specifies a role assigned to a set of principals. A list of rules
@@ -737,10 +758,14 @@ public ExternalLocationsAPI externalLocations() {
* /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
*
* The Files API has two distinct endpoints, one for working with files (`/fs/files`) and
- * another one for working with directories (`/fs/directories`). Both endpoints, use the standard
+ * another one for working with directories (`/fs/directories`). Both endpoints use the standard
* HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their
* URI path. The path is always absolute.
*
+ * <p>Some Files API client features are currently experimental. To enable them, set
+ * `enable_experimental_files_api_client = True` in your configuration profile or use the
+ * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
+ *
*
[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*/
public FilesAPI files() {
@@ -911,6 +936,11 @@ public LakeviewAPI lakeview() {
return lakeviewAPI;
}
+ /** Token-based Lakeview APIs for embedding dashboards in external applications. */
+ public LakeviewEmbeddedAPI lakeviewEmbedded() {
+ return lakeviewEmbeddedAPI;
+ }
+
/**
* The Libraries API allows you to install and uninstall libraries and get the status of libraries
* on a cluster.
@@ -1214,6 +1244,11 @@ public QueriesLegacyAPI queriesLegacy() {
return queriesLegacyAPI;
}
+ /** Query execution APIs for AI/BI Dashboards. */
+ public QueryExecutionAPI queryExecution() {
+ return queryExecutionAPI;
+ }
+
/**
* A service responsible for storing and retrieving the list of queries run against SQL endpoints
* and serverless compute.
@@ -1278,6 +1313,11 @@ public RecipientsAPI recipients() {
return recipientsAPI;
}
+ /** Redash V2 service for workspace configurations (internal) */
+ public RedashConfigAPI redashConfig() {
+ return redashConfigAPI;
+ }
+
/**
* Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity
* Catalog provide centralized access control, auditing, lineage, and discovery of ML models
@@ -1687,6 +1727,17 @@ public WorkspaceConfAPI workspaceConf() {
return workspaceConfAPI;
}
+ /** Replace the default AccessControlService with a custom implementation. */
+ public WorkspaceClient withAccessControlImpl(AccessControlService accessControl) {
+ return this.withAccessControlAPI(new AccessControlAPI(accessControl));
+ }
+
+ /** Replace the default AccessControlAPI with a custom implementation. */
+ public WorkspaceClient withAccessControlAPI(AccessControlAPI accessControl) {
+ this.accessControlAPI = accessControl;
+ return this;
+ }
+
/** Replace the default AccountAccessControlProxyService with a custom implementation. */
public WorkspaceClient withAccountAccessControlProxyImpl(
AccountAccessControlProxyService accountAccessControlProxy) {
@@ -2136,6 +2187,17 @@ public WorkspaceClient withLakeviewAPI(LakeviewAPI lakeview) {
return this;
}
+ /** Replace the default LakeviewEmbeddedService with a custom implementation. */
+ public WorkspaceClient withLakeviewEmbeddedImpl(LakeviewEmbeddedService lakeviewEmbedded) {
+ return this.withLakeviewEmbeddedAPI(new LakeviewEmbeddedAPI(lakeviewEmbedded));
+ }
+
+ /** Replace the default LakeviewEmbeddedAPI with a custom implementation. */
+ public WorkspaceClient withLakeviewEmbeddedAPI(LakeviewEmbeddedAPI lakeviewEmbedded) {
+ this.lakeviewEmbeddedAPI = lakeviewEmbedded;
+ return this;
+ }
+
/** Replace the default LibrariesService with a custom implementation. */
public WorkspaceClient withLibrariesImpl(LibrariesService libraries) {
return this.withLibrariesAPI(new LibrariesAPI(libraries));
@@ -2410,6 +2472,17 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) {
return this;
}
+ /** Replace the default QueryExecutionService with a custom implementation. */
+ public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) {
+ return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution));
+ }
+
+ /** Replace the default QueryExecutionAPI with a custom implementation. */
+ public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) {
+ this.queryExecutionAPI = queryExecution;
+ return this;
+ }
+
/** Replace the default QueryHistoryService with a custom implementation. */
public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) {
return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory));
@@ -2470,6 +2543,17 @@ public WorkspaceClient withRecipientsAPI(RecipientsAPI recipients) {
return this;
}
+ /** Replace the default RedashConfigService with a custom implementation. */
+ public WorkspaceClient withRedashConfigImpl(RedashConfigService redashConfig) {
+ return this.withRedashConfigAPI(new RedashConfigAPI(redashConfig));
+ }
+
+ /** Replace the default RedashConfigAPI with a custom implementation. */
+ public WorkspaceClient withRedashConfigAPI(RedashConfigAPI redashConfig) {
+ this.redashConfigAPI = redashConfig;
+ return this;
+ }
+
/** Replace the default RegisteredModelsService with a custom implementation. */
public WorkspaceClient withRegisteredModelsImpl(RegisteredModelsService registeredModels) {
return this.withRegisteredModelsAPI(new RegisteredModelsAPI(registeredModels));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java
new file mode 100755
index 000000000..86e2737b1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java
@@ -0,0 +1,80 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Contains the BudgetPolicy details. */
+@Generated
+public class BudgetPolicy {
+ /** A list of tags defined by the customer. At most 20 entries are allowed per policy. */
+ @JsonProperty("custom_tags")
+ private Collection customTags;
+
+ /** The Id of the policy. This field is generated by Databricks and globally unique. */
+ @JsonProperty("policy_id")
+ private String policyId;
+
+ /**
+ * The name of the policy. - Must be unique among active policies. - Can contain only characters
+ * from the ISO 8859-1 (latin1) set.
+ */
+ @JsonProperty("policy_name")
+ private String policyName;
+
+ public BudgetPolicy setCustomTags(
+ Collection customTags) {
+ this.customTags = customTags;
+ return this;
+ }
+
+ public Collection getCustomTags() {
+ return customTags;
+ }
+
+ public BudgetPolicy setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ public BudgetPolicy setPolicyName(String policyName) {
+ this.policyName = policyName;
+ return this;
+ }
+
+ public String getPolicyName() {
+ return policyName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BudgetPolicy that = (BudgetPolicy) o;
+ return Objects.equals(customTags, that.customTags)
+ && Objects.equals(policyId, that.policyId)
+ && Objects.equals(policyName, that.policyName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(customTags, policyId, policyName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BudgetPolicy.class)
+ .add("customTags", customTags)
+ .add("policyId", policyId)
+ .add("policyName", policyName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
new file mode 100755
index 000000000..6934a3443
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
@@ -0,0 +1,98 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A service that serves the REST API for budget policies. */
+@Generated
+public class BudgetPolicyAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(BudgetPolicyAPI.class);
+
+ private final BudgetPolicyService impl;
+
+ /** Regular-use constructor */
+ public BudgetPolicyAPI(ApiClient apiClient) {
+ impl = new BudgetPolicyImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public BudgetPolicyAPI(BudgetPolicyService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Create a budget policy.
+ *
+ * <p>Creates a new policy.
+ */
+ public BudgetPolicy create(CreateBudgetPolicyRequest request) {
+ return impl.create(request);
+ }
+
+ public void delete(String policyId) {
+ delete(new DeleteBudgetPolicyRequest().setPolicyId(policyId));
+ }
+
+ /**
+ * Delete a budget policy.
+ *
+ * <p>Deletes a policy.
+ */
+ public void delete(DeleteBudgetPolicyRequest request) {
+ impl.delete(request);
+ }
+
+ public BudgetPolicy get(String policyId) {
+ return get(new GetBudgetPolicyRequest().setPolicyId(policyId));
+ }
+
+ /**
+ * Get a budget policy.
+ *
+ * <p>Retrieves a policy by its ID.
+ */
+ public BudgetPolicy get(GetBudgetPolicyRequest request) {
+ return impl.get(request);
+ }
+
+ /**
+ * List policies.
+ *
+ * <p>Lists all policies. Policies are returned in the alphabetically ascending order of their
+ * names.
+ */
+ public Iterable<BudgetPolicy> list(ListBudgetPoliciesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListBudgetPoliciesResponse::getPolicies,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public BudgetPolicy update(String policyId) {
+ return update(new UpdateBudgetPolicyRequest().setPolicyId(policyId));
+ }
+
+ /**
+ * Update a budget policy.
+ *
+ * <p>Updates a policy.
+ */
+ public BudgetPolicy update(UpdateBudgetPolicyRequest request) {
+ return impl.update(request);
+ }
+
+ public BudgetPolicyService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java
new file mode 100755
index 000000000..dcf84cf42
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyImpl.java
@@ -0,0 +1,96 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of BudgetPolicy */
+@Generated
+class BudgetPolicyImpl implements BudgetPolicyService {
+ private final ApiClient apiClient;
+
+ public BudgetPolicyImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public BudgetPolicy create(CreateBudgetPolicyRequest request) {
+ String path =
+ String.format("/api/2.1/accounts/%s/budget-policies", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, BudgetPolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void delete(DeleteBudgetPolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/budget-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, DeleteResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public BudgetPolicy get(GetBudgetPolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/budget-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, BudgetPolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListBudgetPoliciesResponse list(ListBudgetPoliciesRequest request) {
+ String path =
+ String.format("/api/2.1/accounts/%s/budget-policies", apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListBudgetPoliciesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public BudgetPolicy update(UpdateBudgetPolicyRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/budget-policies/%s",
+ apiClient.configuredAccountID(), request.getPolicyId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, BudgetPolicy.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java
new file mode 100755
index 000000000..a2ff724d4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyService.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * A service that serves the REST API for budget policies.
+ *
+ * <p>This is the high-level interface that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface BudgetPolicyService {
+ /**
+ * Create a budget policy.
+ *
+ * <p>Creates a new policy.
+ */
+ BudgetPolicy create(CreateBudgetPolicyRequest createBudgetPolicyRequest);
+
+ /**
+ * Delete a budget policy.
+ *
+ * <p>Deletes a policy.
+ */
+ void delete(DeleteBudgetPolicyRequest deleteBudgetPolicyRequest);
+
+ /**
+ * Get a budget policy.
+ *
+ * <p>Retrieves a policy by its ID.
+ */
+ BudgetPolicy get(GetBudgetPolicyRequest getBudgetPolicyRequest);
+
+ /**
+ * List policies.
+ *
+ * <p>Lists all policies. Policies are returned in the alphabetically ascending order of their
+ * names.
+ */
+ ListBudgetPoliciesResponse list(ListBudgetPoliciesRequest listBudgetPoliciesRequest);
+
+ /**
+ * Update a budget policy.
+ *
+ * <p>Updates a policy.
+ */
+ BudgetPolicy update(UpdateBudgetPolicyRequest updateBudgetPolicyRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java
new file mode 100755
index 000000000..8e399ffa3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A request to create a BudgetPolicy. */
+@Generated
+public class CreateBudgetPolicyRequest {
+ /** A list of tags defined by the customer. At most 40 entries are allowed per policy. */
+ @JsonProperty("custom_tags")
+ private Collection customTags;
+
+ /**
+ * The name of the policy. - Must be unique among active policies. - Can contain only characters
+ * of 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
+ */
+ @JsonProperty("policy_name")
+ private String policyName;
+
+ /**
+ * A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+ * recommended. This request is only idempotent if a `request_id` is provided.
+ */
+ @JsonProperty("request_id")
+ private String requestId;
+
+ public CreateBudgetPolicyRequest setCustomTags(
+ Collection customTags) {
+ this.customTags = customTags;
+ return this;
+ }
+
+ public Collection getCustomTags() {
+ return customTags;
+ }
+
+ public CreateBudgetPolicyRequest setPolicyName(String policyName) {
+ this.policyName = policyName;
+ return this;
+ }
+
+ public String getPolicyName() {
+ return policyName;
+ }
+
+ public CreateBudgetPolicyRequest setRequestId(String requestId) {
+ this.requestId = requestId;
+ return this;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateBudgetPolicyRequest that = (CreateBudgetPolicyRequest) o;
+ return Objects.equals(customTags, that.customTags)
+ && Objects.equals(policyName, that.policyName)
+ && Objects.equals(requestId, that.requestId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(customTags, policyName, requestId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateBudgetPolicyRequest.class)
+ .add("customTags", customTags)
+ .add("policyName", policyName)
+ .add("requestId", requestId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java
new file mode 100755
index 000000000..d5aabfb58
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a budget policy */
+@Generated
+public class DeleteBudgetPolicyRequest {
+ /** The Id of the policy. */
+ @JsonIgnore private String policyId;
+
+ public DeleteBudgetPolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteBudgetPolicyRequest that = (DeleteBudgetPolicyRequest) o;
+ return Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteBudgetPolicyRequest.class).add("policyId", policyId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java
similarity index 78%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java
index e7bd13a13..6b9b9aae8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java
@@ -1,13 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.sharing;
+package com.databricks.sdk.service.billing;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@Generated
-public class UpdateResponse {
+public class DeleteResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(UpdateResponse.class).toString();
+ return new ToStringer(DeleteResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java
new file mode 100755
index 000000000..25fb6093d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/Filter.java
@@ -0,0 +1,88 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Structured representation of a filter to be applied to a list of policies. All specified filters
+ * will be applied in conjunction.
+ */
+@Generated
+public class Filter {
+ /**
+ * The policy creator user id to be filtered on. If unspecified, all policies will be returned.
+ */
+ @JsonProperty("creator_user_id")
+ @QueryParam("creator_user_id")
+ private Long creatorUserId;
+
+ /**
+ * The policy creator user name to be filtered on. If unspecified, all policies will be returned.
+ */
+ @JsonProperty("creator_user_name")
+ @QueryParam("creator_user_name")
+ private String creatorUserName;
+
+ /**
+ * The partial name of policies to be filtered on. If unspecified, all policies will be returned.
+ */
+ @JsonProperty("policy_name")
+ @QueryParam("policy_name")
+ private String policyName;
+
+ public Filter setCreatorUserId(Long creatorUserId) {
+ this.creatorUserId = creatorUserId;
+ return this;
+ }
+
+ public Long getCreatorUserId() {
+ return creatorUserId;
+ }
+
+ public Filter setCreatorUserName(String creatorUserName) {
+ this.creatorUserName = creatorUserName;
+ return this;
+ }
+
+ public String getCreatorUserName() {
+ return creatorUserName;
+ }
+
+ public Filter setPolicyName(String policyName) {
+ this.policyName = policyName;
+ return this;
+ }
+
+ public String getPolicyName() {
+ return policyName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Filter that = (Filter) o;
+ return Objects.equals(creatorUserId, that.creatorUserId)
+ && Objects.equals(creatorUserName, that.creatorUserName)
+ && Objects.equals(policyName, that.policyName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(creatorUserId, creatorUserName, policyName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Filter.class)
+ .add("creatorUserId", creatorUserId)
+ .add("creatorUserName", creatorUserName)
+ .add("policyName", policyName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java
new file mode 100755
index 000000000..12c33b5cd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get a budget policy */
+@Generated
+public class GetBudgetPolicyRequest {
+ /** The Id of the policy. */
+ @JsonIgnore private String policyId;
+
+ public GetBudgetPolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetBudgetPolicyRequest that = (GetBudgetPolicyRequest) o;
+ return Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetBudgetPolicyRequest.class).add("policyId", policyId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java
new file mode 100755
index 000000000..bfe3035ac
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesRequest.java
@@ -0,0 +1,104 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** List policies */
+@Generated
+public class ListBudgetPoliciesRequest {
+ /** A filter to apply to the list of policies. */
+ @JsonIgnore
+ @QueryParam("filter_by")
+ private Filter filterBy;
+
+ /**
+ * The maximum number of budget policies to return. If unspecified, at most 100 budget policies
+ * will be returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+ */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve
+ * the subsequent page. If unspecified, the first page will be returned.
+ *
+ * <p>When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match
+ * the call that provided the page token.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ /** The sort specification. */
+ @JsonIgnore
+ @QueryParam("sort_spec")
+ private SortSpec sortSpec;
+
+ public ListBudgetPoliciesRequest setFilterBy(Filter filterBy) {
+ this.filterBy = filterBy;
+ return this;
+ }
+
+ public Filter getFilterBy() {
+ return filterBy;
+ }
+
+ public ListBudgetPoliciesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListBudgetPoliciesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public ListBudgetPoliciesRequest setSortSpec(SortSpec sortSpec) {
+ this.sortSpec = sortSpec;
+ return this;
+ }
+
+ public SortSpec getSortSpec() {
+ return sortSpec;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListBudgetPoliciesRequest that = (ListBudgetPoliciesRequest) o;
+ return Objects.equals(filterBy, that.filterBy)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(sortSpec, that.sortSpec);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(filterBy, pageSize, pageToken, sortSpec);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListBudgetPoliciesRequest.class)
+ .add("filterBy", filterBy)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .add("sortSpec", sortSpec)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java
new file mode 100755
index 000000000..6ab49dbca
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetPoliciesResponse.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** A list of policies. */
+@Generated
+public class ListBudgetPoliciesResponse {
+ /**
+ * A token that can be sent as `page_token` to retrieve the next page. If this field is omitted,
+ * there are no subsequent pages.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("policies")
+ private Collection<BudgetPolicy> policies;
+
+ /**
+ * A token that can be sent as `page_token` to retrieve the previous page. If this field is
+ * omitted, there are no previous pages.
+ */
+ @JsonProperty("previous_page_token")
+ private String previousPageToken;
+
+ public ListBudgetPoliciesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListBudgetPoliciesResponse setPolicies(Collection<BudgetPolicy> policies) {
+ this.policies = policies;
+ return this;
+ }
+
+ public Collection<BudgetPolicy> getPolicies() {
+ return policies;
+ }
+
+ public ListBudgetPoliciesResponse setPreviousPageToken(String previousPageToken) {
+ this.previousPageToken = previousPageToken;
+ return this;
+ }
+
+ public String getPreviousPageToken() {
+ return previousPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListBudgetPoliciesResponse that = (ListBudgetPoliciesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(policies, that.policies)
+ && Objects.equals(previousPageToken, that.previousPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, policies, previousPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListBudgetPoliciesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("policies", policies)
+ .add("previousPageToken", previousPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java
new file mode 100755
index 000000000..a0e3d2442
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SortSpec {
+ /** Whether to sort in descending order. */
+ @JsonProperty("descending")
+ @QueryParam("descending")
+ private Boolean descending;
+
+ /** The field to sort by. */
+ @JsonProperty("field")
+ @QueryParam("field")
+ private SortSpecField field;
+
+ public SortSpec setDescending(Boolean descending) {
+ this.descending = descending;
+ return this;
+ }
+
+ public Boolean getDescending() {
+ return descending;
+ }
+
+ public SortSpec setField(SortSpecField field) {
+ this.field = field;
+ return this;
+ }
+
+ public SortSpecField getField() {
+ return field;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SortSpec that = (SortSpec) o;
+ return Objects.equals(descending, that.descending) && Objects.equals(field, that.field);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(descending, field);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SortSpec.class)
+ .add("descending", descending)
+ .add("field", field)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecField.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecField.java
new file mode 100755
index 000000000..239eb01a2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecField.java
@@ -0,0 +1,10 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SortSpecField {
+ POLICY_NAME,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
new file mode 100755
index 000000000..d5b5958d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Update a budget policy */
+@Generated
+public class UpdateBudgetPolicyRequest {
+ /** Contains the BudgetPolicy details. */
+ @JsonProperty("policy")
+ private BudgetPolicy policy;
+
+ /** The Id of the policy. This field is generated by Databricks and globally unique. */
+ @JsonIgnore private String policyId;
+
+ public UpdateBudgetPolicyRequest setPolicy(BudgetPolicy policy) {
+ this.policy = policy;
+ return this;
+ }
+
+ public BudgetPolicy getPolicy() {
+ return policy;
+ }
+
+ public UpdateBudgetPolicyRequest setPolicyId(String policyId) {
+ this.policyId = policyId;
+ return this;
+ }
+
+ public String getPolicyId() {
+ return policyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateBudgetPolicyRequest that = (UpdateBudgetPolicyRequest) o;
+ return Objects.equals(policy, that.policy) && Objects.equals(policyId, that.policyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(policy, policyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateBudgetPolicyRequest.class)
+ .add("policy", policy)
+ .add("policyId", policyId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
index 1c6d9472d..82be0cd83 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
@@ -89,10 +89,6 @@ public class CatalogInfo {
@JsonProperty("provisioning_info")
private ProvisioningInfo provisioningInfo;
- /** Kind of catalog securable. */
- @JsonProperty("securable_kind")
- private CatalogInfoSecurableKind securableKind;
-
/** */
@JsonProperty("securable_type")
private String securableType;
@@ -272,15 +268,6 @@ public ProvisioningInfo getProvisioningInfo() {
return provisioningInfo;
}
- public CatalogInfo setSecurableKind(CatalogInfoSecurableKind securableKind) {
- this.securableKind = securableKind;
- return this;
- }
-
- public CatalogInfoSecurableKind getSecurableKind() {
- return securableKind;
- }
-
public CatalogInfo setSecurableType(String securableType) {
this.securableType = securableType;
return this;
@@ -358,7 +345,6 @@ public boolean equals(Object o) {
&& Objects.equals(properties, that.properties)
&& Objects.equals(providerName, that.providerName)
&& Objects.equals(provisioningInfo, that.provisioningInfo)
- && Objects.equals(securableKind, that.securableKind)
&& Objects.equals(securableType, that.securableType)
&& Objects.equals(shareName, that.shareName)
&& Objects.equals(storageLocation, that.storageLocation)
@@ -387,7 +373,6 @@ public int hashCode() {
properties,
providerName,
provisioningInfo,
- securableKind,
securableType,
shareName,
storageLocation,
@@ -416,7 +401,6 @@ public String toString() {
.add("properties", properties)
.add("providerName", providerName)
.add("provisioningInfo", provisioningInfo)
- .add("securableKind", securableKind)
.add("securableType", securableType)
.add("shareName", shareName)
.add("storageLocation", storageLocation)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoSecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoSecurableKind.java
deleted file mode 100755
index 8e3357434..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfoSecurableKind.java
+++ /dev/null
@@ -1,23 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-
-/** Kind of catalog securable. */
-@Generated
-public enum CatalogInfoSecurableKind {
- CATALOG_DELTASHARING,
- CATALOG_FOREIGN_BIGQUERY,
- CATALOG_FOREIGN_DATABRICKS,
- CATALOG_FOREIGN_MYSQL,
- CATALOG_FOREIGN_POSTGRESQL,
- CATALOG_FOREIGN_REDSHIFT,
- CATALOG_FOREIGN_SNOWFLAKE,
- CATALOG_FOREIGN_SQLDW,
- CATALOG_FOREIGN_SQLSERVER,
- CATALOG_INTERNAL,
- CATALOG_STANDARD,
- CATALOG_SYSTEM,
- CATALOG_SYSTEM_DELTASHARING,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
index 79b318619..5e2e8d332 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java
@@ -66,10 +66,6 @@ public class ConnectionInfo {
@JsonProperty("read_only")
private Boolean readOnly;
- /** Kind of connection securable. */
- @JsonProperty("securable_kind")
- private ConnectionInfoSecurableKind securableKind;
-
/** */
@JsonProperty("securable_type")
private String securableType;
@@ -212,15 +208,6 @@ public Boolean getReadOnly() {
return readOnly;
}
- public ConnectionInfo setSecurableKind(ConnectionInfoSecurableKind securableKind) {
- this.securableKind = securableKind;
- return this;
- }
-
- public ConnectionInfoSecurableKind getSecurableKind() {
- return securableKind;
- }
-
public ConnectionInfo setSecurableType(String securableType) {
this.securableType = securableType;
return this;
@@ -276,7 +263,6 @@ public boolean equals(Object o) {
&& Objects.equals(properties, that.properties)
&& Objects.equals(provisioningInfo, that.provisioningInfo)
&& Objects.equals(readOnly, that.readOnly)
- && Objects.equals(securableKind, that.securableKind)
&& Objects.equals(securableType, that.securableType)
&& Objects.equals(updatedAt, that.updatedAt)
&& Objects.equals(updatedBy, that.updatedBy)
@@ -300,7 +286,6 @@ public int hashCode() {
properties,
provisioningInfo,
readOnly,
- securableKind,
securableType,
updatedAt,
updatedBy,
@@ -324,7 +309,6 @@ public String toString() {
.add("properties", properties)
.add("provisioningInfo", provisioningInfo)
.add("readOnly", readOnly)
- .add("securableKind", securableKind)
.add("securableType", securableType)
.add("updatedAt", updatedAt)
.add("updatedBy", updatedBy)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java
deleted file mode 100755
index 19e00f79a..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java
+++ /dev/null
@@ -1,23 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-
-/** Kind of connection securable. */
-@Generated
-public enum ConnectionInfoSecurableKind {
- CONNECTION_BIGQUERY,
- CONNECTION_BUILTIN_HIVE_METASTORE,
- CONNECTION_DATABRICKS,
- CONNECTION_EXTERNAL_HIVE_METASTORE,
- CONNECTION_GLUE,
- CONNECTION_HTTP_BEARER,
- CONNECTION_MYSQL,
- CONNECTION_ONLINE_CATALOG,
- CONNECTION_POSTGRESQL,
- CONNECTION_REDSHIFT,
- CONNECTION_SNOWFLAKE,
- CONNECTION_SQLDW,
- CONNECTION_SQLSERVER,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java
index 76c85e2cd..1ad9e6fbd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java
@@ -3,50 +3,23 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
/** The type of Unity Catalog securable */
@Generated
public enum SecurableType {
- @JsonProperty("catalog")
CATALOG,
-
- @JsonProperty("connection")
+ CLEAN_ROOM,
CONNECTION,
-
- @JsonProperty("credential")
CREDENTIAL,
-
- @JsonProperty("external_location")
EXTERNAL_LOCATION,
-
- @JsonProperty("function")
FUNCTION,
-
- @JsonProperty("metastore")
METASTORE,
-
- @JsonProperty("pipeline")
PIPELINE,
-
- @JsonProperty("provider")
PROVIDER,
-
- @JsonProperty("recipient")
RECIPIENT,
-
- @JsonProperty("schema")
SCHEMA,
-
- @JsonProperty("share")
SHARE,
-
- @JsonProperty("storage_credential")
STORAGE_CREDENTIAL,
-
- @JsonProperty("table")
TABLE,
-
- @JsonProperty("volume")
VOLUME,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java
index a42b25727..8083ee16b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java
@@ -31,6 +31,13 @@ public class TemporaryCredentials {
@JsonProperty("expiration_time")
private Long expirationTime;
+ /**
+ * GCP temporary credentials for API authentication. Read more at
+ * https://developers.google.com/identity/protocols/oauth2/service-account
+ */
+ @JsonProperty("gcp_oauth_token")
+ private GcpOauthToken gcpOauthToken;
+
public TemporaryCredentials setAwsTempCredentials(AwsCredentials awsTempCredentials) {
this.awsTempCredentials = awsTempCredentials;
return this;
@@ -58,6 +65,15 @@ public Long getExpirationTime() {
return expirationTime;
}
+ public TemporaryCredentials setGcpOauthToken(GcpOauthToken gcpOauthToken) {
+ this.gcpOauthToken = gcpOauthToken;
+ return this;
+ }
+
+ public GcpOauthToken getGcpOauthToken() {
+ return gcpOauthToken;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -65,12 +81,13 @@ public boolean equals(Object o) {
TemporaryCredentials that = (TemporaryCredentials) o;
return Objects.equals(awsTempCredentials, that.awsTempCredentials)
&& Objects.equals(azureAad, that.azureAad)
- && Objects.equals(expirationTime, that.expirationTime);
+ && Objects.equals(expirationTime, that.expirationTime)
+ && Objects.equals(gcpOauthToken, that.gcpOauthToken);
}
@Override
public int hashCode() {
- return Objects.hash(awsTempCredentials, azureAad, expirationTime);
+ return Objects.hash(awsTempCredentials, azureAad, expirationTime, gcpOauthToken);
}
@Override
@@ -79,6 +96,7 @@ public String toString() {
.add("awsTempCredentials", awsTempCredentials)
.add("azureAad", azureAad)
.add("expirationTime", expirationTime)
+ .add("gcpOauthToken", gcpOauthToken)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
index dfcb1434d..b817347f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java
@@ -33,6 +33,10 @@ public class UpdateCatalog {
@JsonProperty("new_name")
private String newName;
+ /** A map of key-value properties attached to the securable. */
+ @JsonProperty("options")
+ private Map<String, String> options;
+
/** Username of current owner of catalog. */
@JsonProperty("owner")
private String owner;
@@ -87,6 +91,15 @@ public String getNewName() {
return newName;
}
+ public UpdateCatalog setOptions(Map<String, String> options) {
+ this.options = options;
+ return this;
+ }
+
+ public Map<String, String> getOptions() {
+ return options;
+ }
+
public UpdateCatalog setOwner(String owner) {
this.owner = owner;
return this;
@@ -115,6 +128,7 @@ public boolean equals(Object o) {
&& Objects.equals(isolationMode, that.isolationMode)
&& Objects.equals(name, that.name)
&& Objects.equals(newName, that.newName)
+ && Objects.equals(options, that.options)
&& Objects.equals(owner, that.owner)
&& Objects.equals(properties, that.properties);
}
@@ -122,7 +136,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- comment, enablePredictiveOptimization, isolationMode, name, newName, owner, properties);
+ comment,
+ enablePredictiveOptimization,
+ isolationMode,
+ name,
+ newName,
+ options,
+ owner,
+ properties);
}
@Override
@@ -133,6 +154,7 @@ public String toString() {
.add("isolationMode", isolationMode)
.add("name", name)
.add("newName", newName)
+ .add("options", options)
.add("owner", owner)
.add("properties", properties)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
index 20673bbf9..459ec98c6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java
@@ -5,6 +5,7 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
@Generated
@@ -20,6 +21,18 @@ public class CleanRoomAssetNotebook {
@JsonProperty("notebook_content")
private String notebookContent;
+ /** top-level status derived from all reviews */
+ @JsonProperty("review_state")
+ private CleanRoomNotebookReviewNotebookReviewState reviewState;
+
+ /** All existing approvals or rejections */
+ @JsonProperty("reviews")
+ private Collection<CleanRoomNotebookReview> reviews;
+
+ /** collaborators that can run the notebook */
+ @JsonProperty("runner_collaborators")
+ private Collection runnerCollaborators;
+
public CleanRoomAssetNotebook setEtag(String etag) {
this.etag = etag;
return this;
@@ -38,17 +51,50 @@ public String getNotebookContent() {
return notebookContent;
}
+ public CleanRoomAssetNotebook setReviewState(
+ CleanRoomNotebookReviewNotebookReviewState reviewState) {
+ this.reviewState = reviewState;
+ return this;
+ }
+
+ public CleanRoomNotebookReviewNotebookReviewState getReviewState() {
+ return reviewState;
+ }
+
+ public CleanRoomAssetNotebook setReviews(Collection<CleanRoomNotebookReview> reviews) {
+ this.reviews = reviews;
+ return this;
+ }
+
+ public Collection<CleanRoomNotebookReview> getReviews() {
+ return reviews;
+ }
+
+ public CleanRoomAssetNotebook setRunnerCollaborators(
+ Collection runnerCollaborators) {
+ this.runnerCollaborators = runnerCollaborators;
+ return this;
+ }
+
+ public Collection getRunnerCollaborators() {
+ return runnerCollaborators;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CleanRoomAssetNotebook that = (CleanRoomAssetNotebook) o;
- return Objects.equals(etag, that.etag) && Objects.equals(notebookContent, that.notebookContent);
+ return Objects.equals(etag, that.etag)
+ && Objects.equals(notebookContent, that.notebookContent)
+ && Objects.equals(reviewState, that.reviewState)
+ && Objects.equals(reviews, that.reviews)
+ && Objects.equals(runnerCollaborators, that.runnerCollaborators);
}
@Override
public int hashCode() {
- return Objects.hash(etag, notebookContent);
+ return Objects.hash(etag, notebookContent, reviewState, reviews, runnerCollaborators);
}
@Override
@@ -56,6 +102,9 @@ public String toString() {
return new ToStringer(CleanRoomAssetNotebook.class)
.add("etag", etag)
.add("notebookContent", notebookContent)
+ .add("reviewState", reviewState)
+ .add("reviews", reviews)
+ .add("runnerCollaborators", runnerCollaborators)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetStatusEnum.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetStatusEnum.java
index 247eccb37..6e65b1c1d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetStatusEnum.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetStatusEnum.java
@@ -7,5 +7,6 @@
@Generated
public enum CleanRoomAssetStatusEnum {
ACTIVE,
+ PENDING,
PERMISSION_DENIED,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java
new file mode 100755
index 000000000..765abc3d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReview.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CleanRoomNotebookReview {
+ /** review comment */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** timestamp of when the review was submitted */
+ @JsonProperty("created_at_millis")
+ private Long createdAtMillis;
+
+ /** review outcome */
+ @JsonProperty("review_state")
+ private CleanRoomNotebookReviewNotebookReviewState reviewState;
+
+ /** collaborator alias of the reviewer */
+ @JsonProperty("reviewer_collaborator_alias")
+ private String reviewerCollaboratorAlias;
+
+ public CleanRoomNotebookReview setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public CleanRoomNotebookReview setCreatedAtMillis(Long createdAtMillis) {
+ this.createdAtMillis = createdAtMillis;
+ return this;
+ }
+
+ public Long getCreatedAtMillis() {
+ return createdAtMillis;
+ }
+
+ public CleanRoomNotebookReview setReviewState(
+ CleanRoomNotebookReviewNotebookReviewState reviewState) {
+ this.reviewState = reviewState;
+ return this;
+ }
+
+ public CleanRoomNotebookReviewNotebookReviewState getReviewState() {
+ return reviewState;
+ }
+
+ public CleanRoomNotebookReview setReviewerCollaboratorAlias(String reviewerCollaboratorAlias) {
+ this.reviewerCollaboratorAlias = reviewerCollaboratorAlias;
+ return this;
+ }
+
+ public String getReviewerCollaboratorAlias() {
+ return reviewerCollaboratorAlias;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomNotebookReview that = (CleanRoomNotebookReview) o;
+ return Objects.equals(comment, that.comment)
+ && Objects.equals(createdAtMillis, that.createdAtMillis)
+ && Objects.equals(reviewState, that.reviewState)
+ && Objects.equals(reviewerCollaboratorAlias, that.reviewerCollaboratorAlias);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(comment, createdAtMillis, reviewState, reviewerCollaboratorAlias);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomNotebookReview.class)
+ .add("comment", comment)
+ .add("createdAtMillis", createdAtMillis)
+ .add("reviewState", reviewState)
+ .add("reviewerCollaboratorAlias", reviewerCollaboratorAlias)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java
new file mode 100755
index 000000000..6366690c5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookReviewNotebookReviewState.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum CleanRoomNotebookReviewNotebookReviewState {
+ APPROVED,
+ PENDING,
+ REJECTED,
+}
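
Reviewer note: a sketch of how a client might consume the new review fields, assuming a CleanRoomAssetNotebook already fetched through the clean room asset APIs; it uses only getters introduced in this diff.

    import com.databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook;
    import com.databricks.sdk.service.cleanrooms.CleanRoomNotebookReview;
    import com.databricks.sdk.service.cleanrooms.CleanRoomNotebookReviewNotebookReviewState;

    public class NotebookReviewCheck {
      /** True when the top-level state derived from all reviews is APPROVED. */
      static boolean isApproved(CleanRoomAssetNotebook notebook) {
        return notebook.getReviewState() == CleanRoomNotebookReviewNotebookReviewState.APPROVED;
      }

      static void printReviews(CleanRoomAssetNotebook notebook) {
        if (notebook.getReviews() == null) {
          return; // no approvals or rejections submitted yet
        }
        for (CleanRoomNotebookReview review : notebook.getReviews()) {
          System.out.printf(
              "%s -> %s: %s%n",
              review.getReviewerCollaboratorAlias(), review.getReviewState(), review.getComment());
        }
      }
    }
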
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
index b1754d4c1..729eefc55 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
@@ -33,8 +33,9 @@ public CleanRoomsAPI(CleanRoomsService mock) {
*
* <p>Create a new clean room with the specified collaborators. This method is asynchronous; the
* returned name field inside the clean_room field can be used to poll the clean room status,
- * using the :method:cleanrooms/get method. When this method returns, the cluster will be in a
- * PROVISIONING state. The cluster will be usable once it enters an ACTIVE state.
+ * using the :method:cleanrooms/get method. When this method returns, the clean room will be in a
+ * PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean
+ * room will be usable once it enters an ACTIVE state.
*
* <p>The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the
* metastore.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
index 680c3e292..07453308b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java
@@ -19,8 +19,9 @@ public interface CleanRoomsService {
*
* <p>Create a new clean room with the specified collaborators. This method is asynchronous; the
* returned name field inside the clean_room field can be used to poll the clean room status,
- * using the :method:cleanrooms/get method. When this method returns, the cluster will be in a
- * PROVISIONING state. The cluster will be usable once it enters an ACTIVE state.
+ * using the :method:cleanrooms/get method. When this method returns, the clean room will be in a
+ * PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean
+ * room will be usable once it enters an ACTIVE state.
*
* <p>The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the
* metastore.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java
new file mode 100755
index 000000000..a2765d651
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CustomPolicyTag {
+ /**
+ * The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be
+ * “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these tags are
+ * preserved.
+ *
+ * <p>- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+ * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17)
+ */
+ @JsonProperty("key")
+ private String key;
+
+ /**
+ * The value of the tag.
+ *
+ * <p>- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+ * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)
+ */
+ @JsonProperty("value")
+ private String value;
+
+ public CustomPolicyTag setKey(String key) {
+ this.key = key;
+ return this;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public CustomPolicyTag setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CustomPolicyTag that = (CustomPolicyTag) o;
+ return Objects.equals(key, that.key) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(key, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CustomPolicyTag.class).add("key", key).add("value", value).toString();
+ }
+}
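
Reviewer note: CustomPolicyTag follows the same fluent-setter pattern as the other generated models. A small sketch with illustrative values:

    import com.databricks.sdk.service.compute.CustomPolicyTag;

    public class CustomPolicyTagExample {
      public static void main(String[] args) {
        // "team" / "data-platform" are illustrative; reserved keys such as
        // "budget-policy-name" or "budget-policy-id" are rejected per the field docs.
        CustomPolicyTag tag = new CustomPolicyTag().setKey("team").setValue("data-platform");
        System.out.println(tag);
      }
    }
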
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java
index 63cfa88f0..bc250469e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java
@@ -6,6 +6,10 @@
@Generated
public enum EventType {
+ ADD_NODES_FAILED,
+ AUTOMATIC_CLUSTER_UPDATE,
+ AUTOSCALING_BACKOFF,
+ AUTOSCALING_FAILED,
AUTOSCALING_STATS_REPORT,
CREATING,
DBFS_DOWN,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
new file mode 100755
index 000000000..a2b487b40
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
@@ -0,0 +1,80 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Cancel the results for a query for a published, embedded dashboard */
+@Generated
+public class CancelPublishedQueryExecutionRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection<String> tokens;
+
+ public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public CancelPublishedQueryExecutionRequest setTokens(Collection<String> tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection<String> getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelPublishedQueryExecutionRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
new file mode 100755
index 000000000..3476fb9ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponse {
+ /** */
+ @JsonProperty("status")
+ private Collection<CancelQueryExecutionResponseStatus> status;
+
+ public CancelQueryExecutionResponse setStatus(
+ Collection<CancelQueryExecutionResponseStatus> status) {
+ this.status = status;
+ return this;
+ }
+
+ public Collection<CancelQueryExecutionResponseStatus> getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
new file mode 100755
index 000000000..d84d7214e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponseStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+ * firm right now.
+ */
+ @JsonProperty("pending")
+ private Empty pending;
+
+ /**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+ * firm right now.
+ */
+ @JsonProperty("success")
+ private Empty success;
+
+ public CancelQueryExecutionResponseStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public CancelQueryExecutionResponseStatus setPending(Empty pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public Empty getPending() {
+ return pending;
+ }
+
+ public CancelQueryExecutionResponseStatus setSuccess(Empty success) {
+ this.success = success;
+ return this;
+ }
+
+ public Empty getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o;
+ return Objects.equals(dataToken, that.dataToken)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, pending, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponseStatus.class)
+ .add("dataToken", dataToken)
+ .add("pending", pending)
+ .add("success", success)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
new file mode 100755
index 000000000..8714d62a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
@@ -0,0 +1,32 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm
+ * right now.
+ */
+@Generated
+public class Empty {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Empty.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
new file mode 100755
index 000000000..c5223007c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Execute query request for published Dashboards. Since published dashboards have the option of
+ * running as the publisher, the datasets and warehouse_id are excluded from the request and instead
+ * read from the source (lakeview-config) via the additional parameters (dashboardName and
+ * dashboardRevisionId).
+ */
+@Generated
+public class ExecutePublishedDashboardQueryRequest {
+ /**
+ * Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel which contains
+ * the list of datasets, warehouse_id, and embedded_credentials
+ */
+ @JsonProperty("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonProperty("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /**
+ * A dashboard schedule can override the warehouse used as compute for processing the published
+ * dashboard queries
+ */
+ @JsonProperty("override_warehouse_id")
+ private String overrideWarehouseId;
+
+ public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) {
+ this.overrideWarehouseId = overrideWarehouseId;
+ return this;
+ }
+
+ public String getOverrideWarehouseId() {
+ return overrideWarehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(overrideWarehouseId, that.overrideWarehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExecutePublishedDashboardQueryRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("overrideWarehouseId", overrideWarehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java
new file mode 100755
index 000000000..94f12df20
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class ExecuteQueryResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExecuteQueryResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
index 15c434997..3e2aafbc1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
@@ -49,8 +49,9 @@ public class GenieMessage {
/**
* MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
* sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
- * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
- * Executing AI provided SQL query. Get the SQL query result by calling
+ * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`:
+ * Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+ * AI provided SQL query. Get the SQL query result by calling
* [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
* status will stay in the `EXECUTING_QUERY` until a client calls
* [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
new file mode 100755
index 000000000..b88922e54
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Read a published dashboard in an embedded ui. */
+@Generated
+public class GetPublishedDashboardEmbeddedRequest {
+ /** UUID identifying the published dashboard. */
+ @JsonIgnore private String dashboardId;
+
+ public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublishedDashboardEmbeddedRequest.class)
+ .add("dashboardId", dashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java
new file mode 100755
index 000000000..5aefc388e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class GetPublishedDashboardEmbeddedResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublishedDashboardEmbeddedResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
new file mode 100755
index 000000000..3e71a00e8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Token-based Lakeview APIs for embedding dashboards in external applications. */
+@Generated
+public class LakeviewEmbeddedAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(LakeviewEmbeddedAPI.class);
+
+ private final LakeviewEmbeddedService impl;
+
+ /** Regular-use constructor */
+ public LakeviewEmbeddedAPI(ApiClient apiClient) {
+ impl = new LakeviewEmbeddedImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) {
+ impl = mock;
+ }
+
+ public void getPublishedDashboardEmbedded(String dashboardId) {
+ getPublishedDashboardEmbedded(
+ new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Read a published dashboard in an embedded ui.
+ *
+ * <p>Get the current published dashboard within an embedded context.
+ */
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ impl.getPublishedDashboardEmbedded(request);
+ }
+
+ public LakeviewEmbeddedService impl() {
+ return impl;
+ }
+}
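
Reviewer note: a sketch of calling the new embedded-dashboard read. The lakeviewEmbedded() accessor name is an assumption based on the SDK's naming convention for WorkspaceClient services, and the dashboard ID is a placeholder.

    import com.databricks.sdk.WorkspaceClient;

    public class EmbeddedDashboardExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Accessor name assumed from the SDK convention; the UUID below is a placeholder.
        w.lakeviewEmbedded().getPublishedDashboardEmbedded("dashboard-uuid");
      }
    }
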
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
new file mode 100755
index 000000000..637c8310e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
@@ -0,0 +1,33 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of LakeviewEmbedded */
+@Generated
+class LakeviewEmbeddedImpl implements LakeviewEmbeddedService {
+ private final ApiClient apiClient;
+
+ public LakeviewEmbeddedImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, GetPublishedDashboardEmbeddedResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
new file mode 100755
index 000000000..ab5f9df94
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
@@ -0,0 +1,22 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Token-based Lakeview APIs for embedding dashboards in external applications.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface LakeviewEmbeddedService {
+ /**
+ * Read a published dashboard in an embedded ui.
+ *
+ * <p>Get the current published dashboard within an embedded context.
+ */
+ void getPublishedDashboardEmbedded(
+ GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
index 972f44191..a3454a3f7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
@@ -7,8 +7,9 @@
/**
* MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
* sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
- * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing
- * AI provided SQL query. Get the SQL query result by calling
+ * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: Waiting
+ * for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI
+ * provided SQL query. Get the SQL query result by calling
* [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status
* will stay in the `EXECUTING_QUERY` until a client calls
* [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response
@@ -33,6 +34,7 @@ public enum MessageStatus {
// field.
FETCHING_METADATA, // Fetching metadata from the data sources.
FILTERING_CONTEXT, // Running smart context step to determine relevant context.
+ PENDING_WAREHOUSE, // Waiting for warehouse before the SQL query can start executing.
QUERY_RESULT_EXPIRED, // SQL result is not available anymore. The user needs to execute the query
// again.
SUBMITTED, // Message has been submitted.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
new file mode 100755
index 000000000..f041070b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PendingStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ public PendingStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PendingStatus that = (PendingStatus) o;
+ return Objects.equals(dataToken, that.dataToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
new file mode 100755
index 000000000..958dd8311
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
@@ -0,0 +1,80 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Poll the results for a query for a published, embedded dashboard */
+@Generated
+public class PollPublishedQueryStatusRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection<String> tokens;
+
+ public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public PollPublishedQueryStatusRequest setTokens(Collection<String> tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection<String> getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollPublishedQueryStatusRequest that = (PollPublishedQueryStatusRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollPublishedQueryStatusRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
new file mode 100755
index 000000000..778e1d961
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponse {
+ /** */
+ @JsonProperty("data")
+ private Collection<PollQueryStatusResponseData> data;
+
+ public PollQueryStatusResponse setData(Collection<PollQueryStatusResponseData> data) {
+ this.data = data;
+ return this;
+ }
+
+ public Collection<PollQueryStatusResponseData> getData() {
+ return data;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponse that = (PollQueryStatusResponse) o;
+ return Objects.equals(data, that.data);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(data);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
new file mode 100755
index 000000000..9de9b2743
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponseData {
+ /** */
+ @JsonProperty("status")
+ private QueryResponseStatus status;
+
+ public PollQueryStatusResponseData setStatus(QueryResponseStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public QueryResponseStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponseData that = (PollQueryStatusResponseData) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
index c09f42a15..f3e972be4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
@@ -40,6 +40,10 @@ public class QueryAttachment {
@JsonProperty("query")
private String query;
+ /** */
+ @JsonProperty("statement_id")
+ private String statementId;
+
/** Name of the query */
@JsonProperty("title")
private String title;
@@ -107,6 +111,15 @@ public String getQuery() {
return query;
}
+ public QueryAttachment setStatementId(String statementId) {
+ this.statementId = statementId;
+ return this;
+ }
+
+ public String getStatementId() {
+ return statementId;
+ }
+
public QueryAttachment setTitle(String title) {
this.title = title;
return this;
@@ -128,6 +141,7 @@ public boolean equals(Object o) {
&& Objects.equals(instructionTitle, that.instructionTitle)
&& Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
&& Objects.equals(query, that.query)
+ && Objects.equals(statementId, that.statementId)
&& Objects.equals(title, that.title);
}
@@ -141,6 +155,7 @@ public int hashCode() {
instructionTitle,
lastUpdatedTimestamp,
query,
+ statementId,
title);
}
@@ -154,6 +169,7 @@ public String toString() {
.add("instructionTitle", instructionTitle)
.add("lastUpdatedTimestamp", lastUpdatedTimestamp)
.add("query", query)
+ .add("statementId", statementId)
.add("title", title)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
new file mode 100755
index 000000000..eb016a2f8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Query execution APIs for AI / BI Dashboards */
+@Generated
+public class QueryExecutionAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class);
+
+ private final QueryExecutionService impl;
+
+ /** Regular-use constructor */
+ public QueryExecutionAPI(ApiClient apiClient) {
+ impl = new QueryExecutionImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public QueryExecutionAPI(QueryExecutionService mock) {
+ impl = mock;
+ }
+
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ String dashboardName, String dashboardRevisionId) {
+ return cancelPublishedQueryExecution(
+ new CancelPublishedQueryExecutionRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ return impl.cancelPublishedQueryExecution(request);
+ }
+
+ public void executePublishedDashboardQuery(String dashboardName, String dashboardRevisionId) {
+ executePublishedDashboardQuery(
+ new ExecutePublishedDashboardQueryRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Execute a query for a published dashboard. */
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ impl.executePublishedDashboardQuery(request);
+ }
+
+ public PollQueryStatusResponse pollPublishedQueryStatus(
+ String dashboardName, String dashboardRevisionId) {
+ return pollPublishedQueryStatus(
+ new PollPublishedQueryStatusRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ return impl.pollPublishedQueryStatus(request);
+ }
+
+ public QueryExecutionService impl() {
+ return impl;
+ }
+}
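
Reviewer note: a sketch of the intended execute-then-poll flow for published dashboard queries. It assumes the WorkspaceClient accessor is named queryExecution() (following the SDK convention) and uses placeholder dashboard name/revision values.

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.dashboards.PollQueryStatusResponse;
    import com.databricks.sdk.service.dashboards.PollQueryStatusResponseData;

    public class PublishedQueryFlow {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        String name = "my-dashboard"; // placeholder
        String revision = "my-revision"; // placeholder

        // Start executing the published dashboard's queries.
        w.queryExecution().executePublishedDashboardQuery(name, revision);

        // Poll once for the status of all queries; a real client would loop with a delay.
        PollQueryStatusResponse resp = w.queryExecution().pollPublishedQueryStatus(name, revision);
        if (resp.getData() != null) {
          for (PollQueryStatusResponseData d : resp.getData()) {
            System.out.println(d.getStatus());
          }
        }
      }
    }
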
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
new file mode 100755
index 000000000..46db1f805
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of QueryExecution */
+@Generated
+class QueryExecutionImpl implements QueryExecutionService {
+ private final ApiClient apiClient;
+
+ public QueryExecutionImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, CancelQueryExecutionResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ apiClient.execute(req, ExecuteQueryResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, PollQueryStatusResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
new file mode 100755
index 000000000..d30cda5b6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
@@ -0,0 +1,26 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Query execution APIs for AI / BI Dashboards
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface QueryExecutionService {
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest);
+
+ /** Execute a query for a published dashboard. */
+ void executePublishedDashboardQuery(
+ ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest);
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ PollQueryStatusResponse pollPublishedQueryStatus(
+ PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
new file mode 100755
index 000000000..334f3d007
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
@@ -0,0 +1,114 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class QueryResponseStatus {
+ /**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+ * firm right now.
+ */
+ @JsonProperty("canceled")
+ private Empty canceled;
+
+ /**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+ * firm right now.
+ */
+ @JsonProperty("closed")
+ private Empty closed;
+
+ /** */
+ @JsonProperty("pending")
+ private PendingStatus pending;
+
+ /**
+ * The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
+ * identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
+ * logging purpose to record the statement_id of all QueryResponseStatus.
+ */
+ @JsonProperty("statement_id")
+ private String statementId;
+
+ /** */
+ @JsonProperty("success")
+ private SuccessStatus success;
+
+ public QueryResponseStatus setCanceled(Empty canceled) {
+ this.canceled = canceled;
+ return this;
+ }
+
+ public Empty getCanceled() {
+ return canceled;
+ }
+
+ public QueryResponseStatus setClosed(Empty closed) {
+ this.closed = closed;
+ return this;
+ }
+
+ public Empty getClosed() {
+ return closed;
+ }
+
+ public QueryResponseStatus setPending(PendingStatus pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public PendingStatus getPending() {
+ return pending;
+ }
+
+ public QueryResponseStatus setStatementId(String statementId) {
+ this.statementId = statementId;
+ return this;
+ }
+
+ public String getStatementId() {
+ return statementId;
+ }
+
+ public QueryResponseStatus setSuccess(SuccessStatus success) {
+ this.success = success;
+ return this;
+ }
+
+ public SuccessStatus getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryResponseStatus that = (QueryResponseStatus) o;
+ return Objects.equals(canceled, that.canceled)
+ && Objects.equals(closed, that.closed)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(statementId, that.statementId)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(canceled, closed, pending, statementId, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryResponseStatus.class)
+ .add("canceled", canceled)
+ .add("closed", closed)
+ .add("pending", pending)
+ .add("statementId", statementId)
+ .add("success", success)
+ .toString();
+ }
+}
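
Reviewer note: QueryResponseStatus is effectively a one-of; exactly one of canceled, closed, pending, or success is expected to be set. A sketch of dispatching on it, using only the getters from this diff:

    import com.databricks.sdk.service.dashboards.QueryResponseStatus;

    public class QueryStatusDispatch {
      /** Returns the result token on success, or null while pending or after cancel/close. */
      static String tokenIfReady(QueryResponseStatus status) {
        if (status.getSuccess() != null) {
          return status.getSuccess().getDataToken();
        }
        if (status.getPending() != null) {
          return null; // still running; poll again with the pending data_token
        }
        return null; // canceled or closed: no result will become available
      }
    }
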
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
new file mode 100755
index 000000000..c54d199d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SuccessStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /** Whether the query result is truncated (either by byte limit or row limit) */
+ @JsonProperty("truncated")
+ private Boolean truncated;
+
+ public SuccessStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public SuccessStatus setTruncated(Boolean truncated) {
+ this.truncated = truncated;
+ return this;
+ }
+
+ public Boolean getTruncated() {
+ return truncated;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SuccessStatus that = (SuccessStatus) o;
+ return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, truncated);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SuccessStatus.class)
+ .add("dataToken", dataToken)
+ .add("truncated", truncated)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
index fcdde2bdc..25f565e50 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java
@@ -18,10 +18,14 @@
* /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
*
* <p>The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another
- * one for working with directories (`/fs/directories`). Both endpoints, use the standard HTTP
+ * one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP
* methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI
* path. The path is always absolute.
*
+ * <p>Some Files API client features are currently experimental. To enable them, set
+ * `enable_experimental_files_api_client = True` in your configuration profile or use the
+ * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
+ *
* <p>[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*/
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
index 4354b81f1..b5103d010 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
@@ -13,10 +13,14 @@
* /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
*
* <p>The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another
- * one for working with directories (`/fs/directories`). Both endpoints, use the standard HTTP
+ * one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP
* methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI
* path. The path is always absolute.
*
+ * <p>Some Files API client features are currently experimental. To enable them, set
+ * `enable_experimental_files_api_client = True` in your configuration profile or use the
+ * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
+ *
* <p>[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*
* <p>This is the high-level interface, that contains generated methods.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java
new file mode 100755
index 000000000..a48e6a2b9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java
@@ -0,0 +1,49 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Rule-based Access Control for Databricks Resources. */
+@Generated
+public class AccessControlAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(AccessControlAPI.class);
+
+ private final AccessControlService impl;
+
+ /** Regular-use constructor */
+ public AccessControlAPI(ApiClient apiClient) {
+ impl = new AccessControlImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public AccessControlAPI(AccessControlService mock) {
+ impl = mock;
+ }
+
+ public CheckPolicyResponse checkPolicy(
+ Actor actor,
+ String permission,
+ String resource,
+ ConsistencyToken consistencyToken,
+ RequestAuthzIdentity authzIdentity) {
+ return checkPolicy(
+ new CheckPolicyRequest()
+ .setActor(actor)
+ .setPermission(permission)
+ .setResource(resource)
+ .setConsistencyToken(consistencyToken)
+ .setAuthzIdentity(authzIdentity));
+ }
+
+ /** Check access policy to a resource. */
+ public CheckPolicyResponse checkPolicy(CheckPolicyRequest request) {
+ return impl.checkPolicy(request);
+ }
+
+ public AccessControlService impl() {
+ return impl;
+ }
+}
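
As a rough usage sketch of the new `AccessControlAPI`, the fluent `checkPolicy` overload above can be exercised as follows. The actor id, permission, resource, and token values are placeholders, and the example assumes the service is exposed on `WorkspaceClient` as `accessControl()`, following the same wiring pattern the client classes use for other services.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.iam.Actor;
import com.databricks.sdk.service.iam.CheckPolicyResponse;
import com.databricks.sdk.service.iam.ConsistencyToken;
import com.databricks.sdk.service.iam.RequestAuthzIdentity;

public class CheckPolicyExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // All identifiers below are placeholders for illustration.
    CheckPolicyResponse resp =
        w.accessControl()
            .checkPolicy(
                new Actor().setActorId(12345L),
                "servicePrincipal/use",
                "accounts/<account_id>/servicePrincipals/<service_principal_id>",
                new ConsistencyToken().setValue("token-from-previous-response"),
                RequestAuthzIdentity.REQUEST_AUTHZ_IDENTITY_USER_CONTEXT);

    System.out.println("permitted: " + resp.getIsPermitted());
  }
}
```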
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java
new file mode 100755
index 000000000..a28f4bc83
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java
@@ -0,0 +1,31 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of AccessControl */
+@Generated
+class AccessControlImpl implements AccessControlService {
+ private final ApiClient apiClient;
+
+ public AccessControlImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CheckPolicyResponse checkPolicy(CheckPolicyRequest request) {
+ String path = "/api/2.0/access-control/check-policy-v2";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, CheckPolicyResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlService.java
new file mode 100755
index 000000000..ede2feb79
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlService.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Rule-based Access Control for Databricks Resources.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface AccessControlService {
+ /** Check access policy to a resource. */
+ CheckPolicyResponse checkPolicy(CheckPolicyRequest checkPolicyRequest);
+}
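
Because the generated `AccessControlAPI` has a mock constructor that accepts an `AccessControlService`, tests can stub the policy check without an `ApiClient` or any HTTP round trip. A minimal sketch; since the interface declares a single method, a lambda satisfies it:

```java
import com.databricks.sdk.service.iam.AccessControlAPI;
import com.databricks.sdk.service.iam.AccessControlService;
import com.databricks.sdk.service.iam.CheckPolicyRequest;
import com.databricks.sdk.service.iam.CheckPolicyResponse;

public class AccessControlMockExample {
  public static void main(String[] args) {
    // Stub implementation that always permits; no ApiClient involved.
    AccessControlService stub = request -> new CheckPolicyResponse().setIsPermitted(true);

    AccessControlAPI api = new AccessControlAPI(stub);
    CheckPolicyResponse resp = api.checkPolicy(new CheckPolicyRequest().setPermission("any"));
    System.out.println(resp.getIsPermitted()); // prints: true
  }
}
```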
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java
new file mode 100755
index 000000000..72dbf807e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Actor.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Represents an identity trying to access a resource - a user or a service principal. A group can
+ * be a principal of a permission set assignment, but an actor is always a user or a service
+ * principal.
+ */
+@Generated
+public class Actor {
+ /** */
+ @JsonProperty("actor_id")
+ @QueryParam("actor_id")
+ private Long actorId;
+
+ public Actor setActorId(Long actorId) {
+ this.actorId = actorId;
+ return this;
+ }
+
+ public Long getActorId() {
+ return actorId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Actor that = (Actor) o;
+ return Objects.equals(actorId, that.actorId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(actorId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Actor.class).add("actorId", actorId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java
new file mode 100755
index 000000000..fae98e5d2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyRequest.java
@@ -0,0 +1,131 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Check access policy to a resource */
+@Generated
+public class CheckPolicyRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("actor")
+ private Actor actor;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("authz_identity")
+ private RequestAuthzIdentity authzIdentity;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("consistency_token")
+ private ConsistencyToken consistencyToken;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("permission")
+ private String permission;
+
+ /**
+ * Ex: (servicePrincipal/use, accounts/<account_id>/servicePrincipals/<service_principal_id>) Ex:
+ * (servicePrincipal.ruleSet/update,
+ * accounts/<account_id>/servicePrincipals/<service_principal_id>/ruleSets/default)
+ */
+ @JsonIgnore
+ @QueryParam("resource")
+ private String resource;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("resource_info")
+ private ResourceInfo resourceInfo;
+
+ public CheckPolicyRequest setActor(Actor actor) {
+ this.actor = actor;
+ return this;
+ }
+
+ public Actor getActor() {
+ return actor;
+ }
+
+ public CheckPolicyRequest setAuthzIdentity(RequestAuthzIdentity authzIdentity) {
+ this.authzIdentity = authzIdentity;
+ return this;
+ }
+
+ public RequestAuthzIdentity getAuthzIdentity() {
+ return authzIdentity;
+ }
+
+ public CheckPolicyRequest setConsistencyToken(ConsistencyToken consistencyToken) {
+ this.consistencyToken = consistencyToken;
+ return this;
+ }
+
+ public ConsistencyToken getConsistencyToken() {
+ return consistencyToken;
+ }
+
+ public CheckPolicyRequest setPermission(String permission) {
+ this.permission = permission;
+ return this;
+ }
+
+ public String getPermission() {
+ return permission;
+ }
+
+ public CheckPolicyRequest setResource(String resource) {
+ this.resource = resource;
+ return this;
+ }
+
+ public String getResource() {
+ return resource;
+ }
+
+ public CheckPolicyRequest setResourceInfo(ResourceInfo resourceInfo) {
+ this.resourceInfo = resourceInfo;
+ return this;
+ }
+
+ public ResourceInfo getResourceInfo() {
+ return resourceInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CheckPolicyRequest that = (CheckPolicyRequest) o;
+ return Objects.equals(actor, that.actor)
+ && Objects.equals(authzIdentity, that.authzIdentity)
+ && Objects.equals(consistencyToken, that.consistencyToken)
+ && Objects.equals(permission, that.permission)
+ && Objects.equals(resource, that.resource)
+ && Objects.equals(resourceInfo, that.resourceInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(actor, authzIdentity, consistencyToken, permission, resource, resourceInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CheckPolicyRequest.class)
+ .add("actor", actor)
+ .add("authzIdentity", authzIdentity)
+ .add("consistencyToken", consistencyToken)
+ .add("permission", permission)
+ .add("resource", resource)
+ .add("resourceInfo", resourceInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponse.java
new file mode 100755
index 000000000..41dcae904
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CheckPolicyResponse.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CheckPolicyResponse {
+ /** */
+ @JsonProperty("consistency_token")
+ private ConsistencyToken consistencyToken;
+
+ /** */
+ @JsonProperty("is_permitted")
+ private Boolean isPermitted;
+
+ public CheckPolicyResponse setConsistencyToken(ConsistencyToken consistencyToken) {
+ this.consistencyToken = consistencyToken;
+ return this;
+ }
+
+ public ConsistencyToken getConsistencyToken() {
+ return consistencyToken;
+ }
+
+ public CheckPolicyResponse setIsPermitted(Boolean isPermitted) {
+ this.isPermitted = isPermitted;
+ return this;
+ }
+
+ public Boolean getIsPermitted() {
+ return isPermitted;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CheckPolicyResponse that = (CheckPolicyResponse) o;
+ return Objects.equals(consistencyToken, that.consistencyToken)
+ && Objects.equals(isPermitted, that.isPermitted);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(consistencyToken, isPermitted);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CheckPolicyResponse.class)
+ .add("consistencyToken", consistencyToken)
+ .add("isPermitted", isPermitted)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java
new file mode 100755
index 000000000..020aaee20
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ConsistencyToken.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ConsistencyToken {
+ /** */
+ @JsonProperty("value")
+ private String value;
+
+ public ConsistencyToken setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConsistencyToken that = (ConsistencyToken) o;
+ return Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConsistencyToken.class).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RequestAuthzIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RequestAuthzIdentity.java
new file mode 100755
index 000000000..a612648e8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/RequestAuthzIdentity.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Defines the identity to be used for authZ of the request on the server side. See the one-pager
+ * for more information: http://go/acl/service-identity
+ */
+@Generated
+public enum RequestAuthzIdentity {
+ REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY,
+ REQUEST_AUTHZ_IDENTITY_USER_CONTEXT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java
new file mode 100755
index 000000000..ea6f7556b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ResourceInfo.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ResourceInfo {
+ /** Id of the current resource. */
+ @JsonProperty("id")
+ @QueryParam("id")
+ private String id;
+
+ /** The legacy acl path of the current resource. */
+ @JsonProperty("legacy_acl_path")
+ @QueryParam("legacy_acl_path")
+ private String legacyAclPath;
+
+ /** Parent resource info for the current resource. The parent may have another parent. */
+ @JsonProperty("parent_resource_info")
+ @QueryParam("parent_resource_info")
+ private ResourceInfo parentResourceInfo;
+
+ public ResourceInfo setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public ResourceInfo setLegacyAclPath(String legacyAclPath) {
+ this.legacyAclPath = legacyAclPath;
+ return this;
+ }
+
+ public String getLegacyAclPath() {
+ return legacyAclPath;
+ }
+
+ public ResourceInfo setParentResourceInfo(ResourceInfo parentResourceInfo) {
+ this.parentResourceInfo = parentResourceInfo;
+ return this;
+ }
+
+ public ResourceInfo getParentResourceInfo() {
+ return parentResourceInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ResourceInfo that = (ResourceInfo) o;
+ return Objects.equals(id, that.id)
+ && Objects.equals(legacyAclPath, that.legacyAclPath)
+ && Objects.equals(parentResourceInfo, that.parentResourceInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, legacyAclPath, parentResourceInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ResourceInfo.class)
+ .add("id", id)
+ .add("legacyAclPath", legacyAclPath)
+ .add("parentResourceInfo", parentResourceInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
index 932a1f363..ebf3edff6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
@@ -51,6 +51,15 @@ public class BaseRun {
@JsonProperty("description")
private String description;
+ /**
+ * effective_performance_target is the actual performance target used by the run during execution.
+ * effective_performance_target can differ from performance_target depending on whether the job was
+ * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically
+ * override the value for the run (e.g. RunNow).
+ */
+ @JsonProperty("effective_performance_target")
+ private PerformanceTarget effectivePerformanceTarget;
+
/**
* The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
* field is set to 0 if the job is still running.
@@ -277,6 +286,15 @@ public String getDescription() {
return description;
}
+ public BaseRun setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) {
+ this.effectivePerformanceTarget = effectivePerformanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getEffectivePerformanceTarget() {
+ return effectivePerformanceTarget;
+ }
+
public BaseRun setEndTime(Long endTime) {
this.endTime = endTime;
return this;
@@ -522,6 +540,7 @@ public boolean equals(Object o) {
&& Objects.equals(clusterSpec, that.clusterSpec)
&& Objects.equals(creatorUserName, that.creatorUserName)
&& Objects.equals(description, that.description)
+ && Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget)
&& Objects.equals(endTime, that.endTime)
&& Objects.equals(executionDuration, that.executionDuration)
&& Objects.equals(gitSource, that.gitSource)
@@ -559,6 +578,7 @@ public int hashCode() {
clusterSpec,
creatorUserName,
description,
+ effectivePerformanceTarget,
endTime,
executionDuration,
gitSource,
@@ -596,6 +616,7 @@ public String toString() {
.add("clusterSpec", clusterSpec)
.add("creatorUserName", creatorUserName)
.add("description", description)
+ .add("effectivePerformanceTarget", effectivePerformanceTarget)
.add("endTime", endTime)
.add("executionDuration", executionDuration)
.add("gitSource", gitSource)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java
index 7abc50cb6..2540ff3db 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java
@@ -15,6 +15,7 @@ public enum CleanRoomTaskRunLifeCycleState {
PENDING,
QUEUED,
RUNNING,
+ RUN_LIFE_CYCLE_STATE_UNSPECIFIED,
SKIPPED,
TERMINATED,
TERMINATING,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java
index 197670f52..0c4a6e854 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java
@@ -16,6 +16,7 @@ public enum CleanRoomTaskRunResultState {
EXCLUDED,
FAILED,
MAXIMUM_CONCURRENT_RUNS_REACHED,
+ RUN_RESULT_STATE_UNSPECIFIED,
SUCCESS,
SUCCESS_WITH_FAILURES,
TIMEDOUT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java
new file mode 100755
index 000000000..b2b669568
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput {
+ /** The run state of the clean rooms notebook task. */
+ @JsonProperty("clean_room_job_run_state")
+ private CleanRoomTaskRunState cleanRoomJobRunState;
+
+ /** The notebook output for the clean room run */
+ @JsonProperty("notebook_output")
+ private NotebookOutput notebookOutput;
+
+ /** Information on how to access the output schema for the clean room run */
+ @JsonProperty("output_schema_info")
+ private OutputSchemaInfo outputSchemaInfo;
+
+ public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput setCleanRoomJobRunState(
+ CleanRoomTaskRunState cleanRoomJobRunState) {
+ this.cleanRoomJobRunState = cleanRoomJobRunState;
+ return this;
+ }
+
+ public CleanRoomTaskRunState getCleanRoomJobRunState() {
+ return cleanRoomJobRunState;
+ }
+
+ public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput setNotebookOutput(
+ NotebookOutput notebookOutput) {
+ this.notebookOutput = notebookOutput;
+ return this;
+ }
+
+ public NotebookOutput getNotebookOutput() {
+ return notebookOutput;
+ }
+
+ public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput setOutputSchemaInfo(
+ OutputSchemaInfo outputSchemaInfo) {
+ this.outputSchemaInfo = outputSchemaInfo;
+ return this;
+ }
+
+ public OutputSchemaInfo getOutputSchemaInfo() {
+ return outputSchemaInfo;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput that =
+ (CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput) o;
+ return Objects.equals(cleanRoomJobRunState, that.cleanRoomJobRunState)
+ && Objects.equals(notebookOutput, that.notebookOutput)
+ && Objects.equals(outputSchemaInfo, that.outputSchemaInfo);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(cleanRoomJobRunState, notebookOutput, outputSchemaInfo);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput.class)
+ .add("cleanRoomJobRunState", cleanRoomJobRunState)
+ .add("notebookOutput", notebookOutput)
+ .add("outputSchemaInfo", outputSchemaInfo)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index d1f4f2c36..8fa81b9d6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -128,6 +128,13 @@ public class CreateJob {
@JsonProperty("parameters")
private Collection<JobParameterDefinition> parameters;
+ /**
+ * PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+ * compute should be.
+ */
+ @JsonProperty("performance_target")
+ private PerformanceTarget performanceTarget;
+
/** The queue settings of the job. */
@JsonProperty("queue")
private QueueSettings queue;
@@ -325,6 +332,15 @@ public Collection<JobParameterDefinition> getParameters() {
return parameters;
}
+ public CreateJob setPerformanceTarget(PerformanceTarget performanceTarget) {
+ this.performanceTarget = performanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getPerformanceTarget() {
+ return performanceTarget;
+ }
+
public CreateJob setQueue(QueueSettings queue) {
this.queue = queue;
return this;
@@ -418,6 +434,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notificationSettings, that.notificationSettings)
&& Objects.equals(parameters, that.parameters)
+ && Objects.equals(performanceTarget, that.performanceTarget)
&& Objects.equals(queue, that.queue)
&& Objects.equals(runAs, that.runAs)
&& Objects.equals(schedule, that.schedule)
@@ -447,6 +464,7 @@ public int hashCode() {
name,
notificationSettings,
parameters,
+ performanceTarget,
queue,
runAs,
schedule,
@@ -476,6 +494,7 @@ public String toString() {
.add("name", name)
.add("notificationSettings", notificationSettings)
.add("parameters", parameters)
+ .add("performanceTarget", performanceTarget)
.add("queue", queue)
.add("runAs", runAs)
.add("schedule", schedule)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index 71ded0884..eb3e89525 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -124,6 +124,13 @@ public class JobSettings {
@JsonProperty("parameters")
private Collection<JobParameterDefinition> parameters;
+ /**
+ * PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+ * compute should be.
+ */
+ @JsonProperty("performance_target")
+ private PerformanceTarget performanceTarget;
+
/** The queue settings of the job. */
@JsonProperty("queue")
private QueueSettings queue;
@@ -312,6 +319,15 @@ public Collection<JobParameterDefinition> getParameters() {
return parameters;
}
+ public JobSettings setPerformanceTarget(PerformanceTarget performanceTarget) {
+ this.performanceTarget = performanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getPerformanceTarget() {
+ return performanceTarget;
+ }
+
public JobSettings setQueue(QueueSettings queue) {
this.queue = queue;
return this;
@@ -404,6 +420,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notificationSettings, that.notificationSettings)
&& Objects.equals(parameters, that.parameters)
+ && Objects.equals(performanceTarget, that.performanceTarget)
&& Objects.equals(queue, that.queue)
&& Objects.equals(runAs, that.runAs)
&& Objects.equals(schedule, that.schedule)
@@ -432,6 +449,7 @@ public int hashCode() {
name,
notificationSettings,
parameters,
+ performanceTarget,
queue,
runAs,
schedule,
@@ -460,6 +478,7 @@ public String toString() {
.add("name", name)
.add("notificationSettings", notificationSettings)
.add("parameters", parameters)
+ .add("performanceTarget", performanceTarget)
.add("queue", queue)
.add("runAs", runAs)
.add("schedule", schedule)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java
new file mode 100755
index 000000000..83d2cfa9a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Stores the catalog name, schema name, and the output schema expiration time for the clean room
+ * run.
+ */
+@Generated
+public class OutputSchemaInfo {
+ /** */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** The expiration time for the output schema as a Unix timestamp in milliseconds. */
+ @JsonProperty("expiration_time")
+ private Long expirationTime;
+
+ /** */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ public OutputSchemaInfo setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public OutputSchemaInfo setExpirationTime(Long expirationTime) {
+ this.expirationTime = expirationTime;
+ return this;
+ }
+
+ public Long getExpirationTime() {
+ return expirationTime;
+ }
+
+ public OutputSchemaInfo setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ OutputSchemaInfo that = (OutputSchemaInfo) o;
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(expirationTime, that.expirationTime)
+ && Objects.equals(schemaName, that.schemaName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogName, expirationTime, schemaName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(OutputSchemaInfo.class)
+ .add("catalogName", catalogName)
+ .add("expirationTime", expirationTime)
+ .add("schemaName", schemaName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java
new file mode 100755
index 000000000..88f9fcd78
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java
@@ -0,0 +1,16 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * PerformanceTarget defines how performant (lower latency) or cost-efficient the execution of a
+ * run on serverless compute should be. The performance mode on the job or pipeline should map to a
+ * performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).
+ */
+@Generated
+public enum PerformanceTarget {
+ COST_OPTIMIZED,
+ PERFORMANCE_OPTIMIZED,
+}
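
To illustrate the new `performance_target` field on job creation, here is a hedged sketch of a serverless job that prefers cost over latency. The job name, task key, and notebook path are hypothetical; `setTasks` and `NotebookTask` belong to the existing Jobs API surface.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.CreateResponse;
import com.databricks.sdk.service.jobs.NotebookTask;
import com.databricks.sdk.service.jobs.PerformanceTarget;
import com.databricks.sdk.service.jobs.Task;
import java.util.Collections;

public class PerformanceTargetExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Ask the scheduler to optimize this job's serverless runs for cost.
    CreateResponse created =
        w.jobs()
            .create(
                new CreateJob()
                    .setName("nightly-etl")
                    .setPerformanceTarget(PerformanceTarget.COST_OPTIMIZED)
                    .setTasks(
                        Collections.singletonList(
                            new Task()
                                .setTaskKey("main")
                                .setNotebookTask(
                                    new NotebookTask().setNotebookPath("/Users/me/etl")))));

    System.out.println("job id: " + created.getJobId());
  }
}
```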
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
index bee02f004..09bf4f5f5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
@@ -52,6 +52,15 @@ public class Run {
@JsonProperty("description")
private String description;
+ /**
+ * effective_performance_target is the actual performance target used by the run during execution.
+ * effective_performance_target can differ from performance_target depending on whether the job was
+ * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if we specifically
+ * override the value for the run (e.g. RunNow).
+ */
+ @JsonProperty("effective_performance_target")
+ private PerformanceTarget effectivePerformanceTarget;
+
/**
* The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
* field is set to 0 if the job is still running.
@@ -286,6 +295,15 @@ public String getDescription() {
return description;
}
+ public Run setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) {
+ this.effectivePerformanceTarget = effectivePerformanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getEffectivePerformanceTarget() {
+ return effectivePerformanceTarget;
+ }
+
public Run setEndTime(Long endTime) {
this.endTime = endTime;
return this;
@@ -549,6 +567,7 @@ public boolean equals(Object o) {
&& Objects.equals(clusterSpec, that.clusterSpec)
&& Objects.equals(creatorUserName, that.creatorUserName)
&& Objects.equals(description, that.description)
+ && Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget)
&& Objects.equals(endTime, that.endTime)
&& Objects.equals(executionDuration, that.executionDuration)
&& Objects.equals(gitSource, that.gitSource)
@@ -588,6 +607,7 @@ public int hashCode() {
clusterSpec,
creatorUserName,
description,
+ effectivePerformanceTarget,
endTime,
executionDuration,
gitSource,
@@ -627,6 +647,7 @@ public String toString() {
.add("clusterSpec", clusterSpec)
.add("creatorUserName", creatorUserName)
.add("description", description)
+ .add("effectivePerformanceTarget", effectivePerformanceTarget)
.add("endTime", endTime)
.add("executionDuration", executionDuration)
.add("gitSource", gitSource)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
index d99cb5c75..d76e352b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
@@ -84,6 +84,14 @@ public class RunNow {
@JsonProperty("only")
private Collection<String> only;
+ /**
+ * PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+ * compute should be. For a RunNow request, the run will execute with these settings instead of the
+ * ones defined in the job.
+ */
+ @JsonProperty("performance_target")
+ private PerformanceTarget performanceTarget;
+
/** Controls whether the pipeline should perform a full refresh */
@JsonProperty("pipeline_params")
private PipelineParams pipelineParams;
@@ -205,6 +213,15 @@ public Collection<String> getOnly() {
return only;
}
+ public RunNow setPerformanceTarget(PerformanceTarget performanceTarget) {
+ this.performanceTarget = performanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getPerformanceTarget() {
+ return performanceTarget;
+ }
+
public RunNow setPipelineParams(PipelineParams pipelineParams) {
this.pipelineParams = pipelineParams;
return this;
@@ -271,6 +288,7 @@ public boolean equals(Object o) {
&& Objects.equals(jobParameters, that.jobParameters)
&& Objects.equals(notebookParams, that.notebookParams)
&& Objects.equals(only, that.only)
+ && Objects.equals(performanceTarget, that.performanceTarget)
&& Objects.equals(pipelineParams, that.pipelineParams)
&& Objects.equals(pythonNamedParams, that.pythonNamedParams)
&& Objects.equals(pythonParams, that.pythonParams)
@@ -289,6 +307,7 @@ public int hashCode() {
jobParameters,
notebookParams,
only,
+ performanceTarget,
pipelineParams,
pythonNamedParams,
pythonParams,
@@ -307,6 +326,7 @@ public String toString() {
.add("jobParameters", jobParameters)
.add("notebookParams", notebookParams)
.add("only", only)
+ .add("performanceTarget", performanceTarget)
.add("pipelineParams", pipelineParams)
.add("pythonNamedParams", pythonNamedParams)
.add("pythonParams", pythonParams)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
index ebfe9b747..711e82a8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
@@ -10,6 +10,10 @@
/** Run output was retrieved successfully. */
@Generated
public class RunOutput {
+ /** The output of a clean rooms notebook task, if available */
+ @JsonProperty("clean_rooms_notebook_output")
+ private CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput;
+
/** The output of a dbt task, if available. */
@JsonProperty("dbt_output")
private DbtOutput dbtOutput;
@@ -69,6 +73,16 @@ public class RunOutput {
@JsonProperty("sql_output")
private SqlOutput sqlOutput;
+ public RunOutput setCleanRoomsNotebookOutput(
+ CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput cleanRoomsNotebookOutput) {
+ this.cleanRoomsNotebookOutput = cleanRoomsNotebookOutput;
+ return this;
+ }
+
+ public CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput getCleanRoomsNotebookOutput() {
+ return cleanRoomsNotebookOutput;
+ }
+
public RunOutput setDbtOutput(DbtOutput dbtOutput) {
this.dbtOutput = dbtOutput;
return this;
@@ -164,7 +178,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RunOutput that = (RunOutput) o;
- return Objects.equals(dbtOutput, that.dbtOutput)
+ return Objects.equals(cleanRoomsNotebookOutput, that.cleanRoomsNotebookOutput)
+ && Objects.equals(dbtOutput, that.dbtOutput)
&& Objects.equals(error, that.error)
&& Objects.equals(errorTrace, that.errorTrace)
&& Objects.equals(info, that.info)
@@ -179,6 +194,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ cleanRoomsNotebookOutput,
dbtOutput,
error,
errorTrace,
@@ -194,6 +210,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(RunOutput.class)
+ .add("cleanRoomsNotebookOutput", cleanRoomsNotebookOutput)
.add("dbtOutput", dbtOutput)
.add("error", error)
.add("errorTrace", errorTrace)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
index 8d9371cdb..aec2fdf23 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
@@ -72,6 +72,22 @@ public class RunTask {
@JsonProperty("description")
private String description;
+ /**
+ * Denotes whether or not the task was disabled by the user. Disabled tasks do not execute and are
+ * immediately skipped as soon as they are unblocked.
+ */
+ @JsonProperty("disabled")
+ private Boolean disabled;
+
+ /**
+ * effective_performance_target is the actual performance target used by the run during execution.
+ * effective_performance_target can differ from performance_target depending on whether the job was
+ * eligible to be cost-optimized (e.g. contains at least 1 serverless task) or if an override was
+ * provided for the run (e.g. RunNow).
+ */
+ @JsonProperty("effective_performance_target")
+ private PerformanceTarget effectivePerformanceTarget;
+
/**
* An optional set of email addresses notified when the task run begins or completes. The default
* behavior is to not send any emails.
@@ -354,6 +370,24 @@ public String getDescription() {
return description;
}
+ public RunTask setDisabled(Boolean disabled) {
+ this.disabled = disabled;
+ return this;
+ }
+
+ public Boolean getDisabled() {
+ return disabled;
+ }
+
+ public RunTask setEffectivePerformanceTarget(PerformanceTarget effectivePerformanceTarget) {
+ this.effectivePerformanceTarget = effectivePerformanceTarget;
+ return this;
+ }
+
+ public PerformanceTarget getEffectivePerformanceTarget() {
+ return effectivePerformanceTarget;
+ }
+
public RunTask setEmailNotifications(JobEmailNotifications emailNotifications) {
this.emailNotifications = emailNotifications;
return this;
@@ -655,6 +689,8 @@ public boolean equals(Object o) {
&& Objects.equals(dbtTask, that.dbtTask)
&& Objects.equals(dependsOn, that.dependsOn)
&& Objects.equals(description, that.description)
+ && Objects.equals(disabled, that.disabled)
+ && Objects.equals(effectivePerformanceTarget, that.effectivePerformanceTarget)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(endTime, that.endTime)
&& Objects.equals(environmentKey, that.environmentKey)
@@ -700,6 +736,8 @@ public int hashCode() {
dbtTask,
dependsOn,
description,
+ disabled,
+ effectivePerformanceTarget,
emailNotifications,
endTime,
environmentKey,
@@ -745,6 +783,8 @@ public String toString() {
.add("dbtTask", dbtTask)
.add("dependsOn", dependsOn)
.add("description", description)
+ .add("disabled", disabled)
+ .add("effectivePerformanceTarget", effectivePerformanceTarget)
.add("emailNotifications", emailNotifications)
.add("endTime", endTime)
.add("environmentKey", environmentKey)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java
index 5c2959c91..4978f8544 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java
@@ -37,6 +37,10 @@ public class SparkJarTask {
@JsonProperty("parameters")
private Collection<String> parameters;
+ /** Deprecated. A value of `false` is no longer supported. */
+ @JsonProperty("run_as_repl")
+ private Boolean runAsRepl;
+
public SparkJarTask setJarUri(String jarUri) {
this.jarUri = jarUri;
return this;
@@ -64,6 +68,15 @@ public Collection<String> getParameters() {
return parameters;
}
+ public SparkJarTask setRunAsRepl(Boolean runAsRepl) {
+ this.runAsRepl = runAsRepl;
+ return this;
+ }
+
+ public Boolean getRunAsRepl() {
+ return runAsRepl;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -71,12 +84,13 @@ public boolean equals(Object o) {
SparkJarTask that = (SparkJarTask) o;
return Objects.equals(jarUri, that.jarUri)
&& Objects.equals(mainClassName, that.mainClassName)
- && Objects.equals(parameters, that.parameters);
+ && Objects.equals(parameters, that.parameters)
+ && Objects.equals(runAsRepl, that.runAsRepl);
}
@Override
public int hashCode() {
- return Objects.hash(jarUri, mainClassName, parameters);
+ return Objects.hash(jarUri, mainClassName, parameters, runAsRepl);
}
@Override
@@ -85,6 +99,7 @@ public String toString() {
.add("jarUri", jarUri)
.add("mainClassName", mainClassName)
.add("parameters", parameters)
+ .add("runAsRepl", runAsRepl)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
index 5f99e706f..5028200fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java
@@ -44,6 +44,7 @@
*/
@Generated
public enum TerminationCodeCode {
+ BUDGET_POLICY_LIMIT_EXCEEDED,
CANCELED, // The run was canceled during execution by the platform; for
// example, if the maximum run duration was exceeded.
CLOUD_FAILURE, // The run failed due to a cloud provider issue. Refer to the state message for
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java
index e0315ebf7..a1c47bb8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CreateCustomAppIntegration.java
@@ -35,6 +35,13 @@ public class CreateCustomAppIntegration {
@JsonProperty("token_access_policy")
private TokenAccessPolicy tokenAccessPolicy;
+ /**
+ * Scopes that the end user will need to consent to in order to mint the access token. If the user
+ * does not authorize them, the access token will not be minted. Must be a subset of scopes.
+ */
+ @JsonProperty("user_authorized_scopes")
+ private Collection<String> userAuthorizedScopes;
+
public CreateCustomAppIntegration setConfidential(Boolean confidential) {
this.confidential = confidential;
return this;
@@ -80,6 +87,16 @@ public TokenAccessPolicy getTokenAccessPolicy() {
return tokenAccessPolicy;
}
+ public CreateCustomAppIntegration setUserAuthorizedScopes(
+ Collection<String> userAuthorizedScopes) {
+ this.userAuthorizedScopes = userAuthorizedScopes;
+ return this;
+ }
+
+ public Collection<String> getUserAuthorizedScopes() {
+ return userAuthorizedScopes;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -89,12 +106,14 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(redirectUrls, that.redirectUrls)
&& Objects.equals(scopes, that.scopes)
- && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy);
+ && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy)
+ && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes);
}
@Override
public int hashCode() {
- return Objects.hash(confidential, name, redirectUrls, scopes, tokenAccessPolicy);
+ return Objects.hash(
+ confidential, name, redirectUrls, scopes, tokenAccessPolicy, userAuthorizedScopes);
}
@Override
@@ -105,6 +124,7 @@ public String toString() {
.add("redirectUrls", redirectUrls)
.add("scopes", scopes)
.add("tokenAccessPolicy", tokenAccessPolicy)
+ .add("userAuthorizedScopes", userAuthorizedScopes)
.toString();
}
}
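
A sketch of the new `user_authorized_scopes` field at creation time, via the account-level client. The app name, redirect URL, and scope values are placeholders; per the doc comment, `user_authorized_scopes` must be a subset of `scopes`.

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.oauth2.CreateCustomAppIntegration;
import com.databricks.sdk.service.oauth2.CreateCustomAppIntegrationOutput;
import java.util.Arrays;

public class CustomAppExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();

    CreateCustomAppIntegrationOutput created =
        a.customAppIntegration()
            .create(
                new CreateCustomAppIntegration()
                    .setName("my-app")
                    .setRedirectUrls(Arrays.asList("https://example.com/callback"))
                    .setScopes(Arrays.asList("all-apis", "offline_access"))
                    // Must be a subset of the scopes above; consented to by the end user.
                    .setUserAuthorizedScopes(Arrays.asList("offline_access")));

    System.out.println("integration id: " + created.getIntegrationId());
  }
}
```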
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java
index 72e53a857..b085c10d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/GetCustomAppIntegrationOutput.java
@@ -52,6 +52,13 @@ public class GetCustomAppIntegrationOutput {
@JsonProperty("token_access_policy")
private TokenAccessPolicy tokenAccessPolicy;
+ /**
+ * Scopes that the end user will need to consent to in order to mint the access token. If the user
+ * does not authorize them, the access token will not be minted. Must be a subset of scopes.
+ */
+ @JsonProperty("user_authorized_scopes")
+ private Collection<String> userAuthorizedScopes;
+
public GetCustomAppIntegrationOutput setClientId(String clientId) {
this.clientId = clientId;
return this;
@@ -142,6 +149,16 @@ public TokenAccessPolicy getTokenAccessPolicy() {
return tokenAccessPolicy;
}
+ public GetCustomAppIntegrationOutput setUserAuthorizedScopes(
+ Collection<String> userAuthorizedScopes) {
+ this.userAuthorizedScopes = userAuthorizedScopes;
+ return this;
+ }
+
+ public Collection<String> getUserAuthorizedScopes() {
+ return userAuthorizedScopes;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -156,7 +173,8 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(redirectUrls, that.redirectUrls)
&& Objects.equals(scopes, that.scopes)
- && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy);
+ && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy)
+ && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes);
}
@Override
@@ -171,7 +189,8 @@ public int hashCode() {
name,
redirectUrls,
scopes,
- tokenAccessPolicy);
+ tokenAccessPolicy,
+ userAuthorizedScopes);
}
@Override
@@ -187,6 +206,7 @@ public String toString() {
.add("redirectUrls", redirectUrls)
.add("scopes", scopes)
.add("tokenAccessPolicy", tokenAccessPolicy)
+ .add("userAuthorizedScopes", userAuthorizedScopes)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java
index 7cb58a881..e310550a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java
@@ -18,10 +18,24 @@ public class UpdateCustomAppIntegration {
@JsonProperty("redirect_urls")
private Collection<String> redirectUrls;
+ /**
+ * List of OAuth scopes to be updated in the custom OAuth app integration. As with redirect
+ * URIs, this fully replaces the existing values instead of appending.
+ */
+ @JsonProperty("scopes")
+ private Collection<String> scopes;
+
/** Token access policy to be updated in the custom OAuth app integration */
@JsonProperty("token_access_policy")
private TokenAccessPolicy tokenAccessPolicy;
+ /**
+ * Scopes that the end user will need to consent to in order to mint the access token. If the user
+ * does not authorize them, the access token will not be minted. Must be a subset of scopes.
+ */
+ @JsonProperty("user_authorized_scopes")
+ private Collection<String> userAuthorizedScopes;
+
public UpdateCustomAppIntegration setIntegrationId(String integrationId) {
this.integrationId = integrationId;
return this;
@@ -40,6 +54,15 @@ public Collection<String> getRedirectUrls() {
return redirectUrls;
}
+ public UpdateCustomAppIntegration setScopes(Collection<String> scopes) {
+ this.scopes = scopes;
+ return this;
+ }
+
+ public Collection<String> getScopes() {
+ return scopes;
+ }
+
public UpdateCustomAppIntegration setTokenAccessPolicy(TokenAccessPolicy tokenAccessPolicy) {
this.tokenAccessPolicy = tokenAccessPolicy;
return this;
@@ -49,6 +72,16 @@ public TokenAccessPolicy getTokenAccessPolicy() {
return tokenAccessPolicy;
}
+ public UpdateCustomAppIntegration setUserAuthorizedScopes(
+ Collection<String> userAuthorizedScopes) {
+ this.userAuthorizedScopes = userAuthorizedScopes;
+ return this;
+ }
+
+ public Collection<String> getUserAuthorizedScopes() {
+ return userAuthorizedScopes;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -56,12 +89,15 @@ public boolean equals(Object o) {
UpdateCustomAppIntegration that = (UpdateCustomAppIntegration) o;
return Objects.equals(integrationId, that.integrationId)
&& Objects.equals(redirectUrls, that.redirectUrls)
- && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy);
+ && Objects.equals(scopes, that.scopes)
+ && Objects.equals(tokenAccessPolicy, that.tokenAccessPolicy)
+ && Objects.equals(userAuthorizedScopes, that.userAuthorizedScopes);
}
@Override
public int hashCode() {
- return Objects.hash(integrationId, redirectUrls, tokenAccessPolicy);
+ return Objects.hash(
+ integrationId, redirectUrls, scopes, tokenAccessPolicy, userAuthorizedScopes);
}
@Override
@@ -69,7 +105,9 @@ public String toString() {
return new ToStringer(UpdateCustomAppIntegration.class)
.add("integrationId", integrationId)
.add("redirectUrls", redirectUrls)
+ .add("scopes", scopes)
.add("tokenAccessPolicy", tokenAccessPolicy)
+ .add("userAuthorizedScopes", userAuthorizedScopes)
.toString();
}
}
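
Because `scopes` on update fully replaces the existing list rather than appending, callers should send the complete desired set every time. A sketch with a placeholder integration id:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.oauth2.UpdateCustomAppIntegration;
import java.util.Arrays;

public class UpdateCustomAppExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();

    a.customAppIntegration()
        .update(
            new UpdateCustomAppIntegration()
                .setIntegrationId("<integration-id>") // placeholder
                // Full replacement: include every scope the app should keep.
                .setScopes(Arrays.asList("all-apis", "offline_access"))
                .setUserAuthorizedScopes(Arrays.asList("offline_access")));
  }
}
```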
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java
index 35553b03b..3407fd4c8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java
@@ -9,12 +9,7 @@
@Generated
public class AiGatewayGuardrailPiiBehavior {
- /**
- * Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
- * guardrail and the request contains PII, the request is not sent to the model server and 400
- * status code is returned; if 'BLOCK' is set for the output guardrail and the model response
- * contains PII, the PII info in the response is redacted and 400 status code is returned.
- */
+ /** Configuration for input guardrail filters. */
@JsonProperty("behavior")
private AiGatewayGuardrailPiiBehaviorBehavior behavior;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorBehavior.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorBehavior.java
index 8e15e4264..36ad1bcfd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorBehavior.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorBehavior.java
@@ -4,12 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
- * guardrail and the request contains PII, the request is not sent to the model server and 400
- * status code is returned; if 'BLOCK' is set for the output guardrail and the model response
- * contains PII, the PII info in the response is redacted and 400 status code is returned.
- */
@Generated
public enum AiGatewayGuardrailPiiBehaviorBehavior {
BLOCK,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
index 72f3def22..a2870c1f5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
@@ -5,10 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
- * being the default if not specified.
- */
@Generated
public enum AiGatewayRateLimitKey {
@JsonProperty("endpoint")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitRenewalPeriod.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitRenewalPeriod.java
index 560ea44ae..e60bb88e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitRenewalPeriod.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitRenewalPeriod.java
@@ -5,7 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/** Renewal period field for a rate limit. Currently, only 'minute' is supported. */
@Generated
public enum AiGatewayRateLimitRenewalPeriod {
@JsonProperty("minute")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java
index f875bdd92..176f17334 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java
@@ -11,9 +11,9 @@
public class AmazonBedrockConfig {
/**
* The Databricks secret key reference for an AWS access key ID with permissions to interact with
- * Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You
- * must provide an API key using one of the following fields: `aws_access_key_id` or
- * `aws_access_key_id_plaintext`.
+ * Bedrock services. If you prefer to paste your API key directly, see
+ * `aws_access_key_id_plaintext`. You must provide an API key using one of the following fields:
+ * `aws_access_key_id` or `aws_access_key_id_plaintext`.
*/
@JsonProperty("aws_access_key_id")
private String awsAccessKeyId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java
index 4a4516289..1159c0798 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java
@@ -5,10 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
- * Anthropic, Cohere, AI21Labs, Amazon.
- */
@Generated
public enum AmazonBedrockConfigBedrockProvider {
@JsonProperty("ai21labs")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
index 34ff0f488..cb1665074 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java
@@ -9,7 +9,10 @@
@Generated
public class AutoCaptureConfigOutput {
- /** The name of the catalog in Unity Catalog. */
+ /**
+ * The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name
+ * if the inference table is already enabled.
+ */
@JsonProperty("catalog_name")
private String catalogName;
@@ -17,7 +20,10 @@ public class AutoCaptureConfigOutput {
@JsonProperty("enabled")
private Boolean enabled;
- /** The name of the schema in Unity Catalog. */
+ /**
+ * The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if
+ * the inference table is already enabled.
+ */
@JsonProperty("schema_name")
private String schemaName;
@@ -25,7 +31,10 @@ public class AutoCaptureConfigOutput {
@JsonProperty("state")
private AutoCaptureState state;
- /** The prefix of the table in Unity Catalog. */
+ /**
+ * The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if
+ * the inference table is already enabled.
+ */
@JsonProperty("table_name_prefix")
private String tableNamePrefix;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java
index 948367afe..e9362231f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java
@@ -11,8 +11,8 @@
@Generated
public class CreateServingEndpoint {
/**
- * The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
- * supported as of now.
+ * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and
+ * provisioned throughput endpoints are currently supported.
*/
@JsonProperty("ai_gateway")
private AiGatewayConfig aiGateway;
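
A sketch of attaching an AI Gateway configuration at creation time, now that provisioned throughput endpoints are supported alongside external models (endpoint name and gateway settings are illustrative):

    CreateServingEndpoint create =
        new CreateServingEndpoint()
            .setName("my-pt-endpoint")
            .setAiGateway(
                new AiGatewayConfig()
                    .setUsageTrackingConfig(new AiGatewayUsageTrackingConfig().setEnabled(true)));
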
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java
index 0d1f893f6..ee13ccec8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details necessary to query this object's API through the DataPlane APIs. */
@Generated
public class DataPlaneInfo {
/** Authorization details as a string. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java
index a12d2fe61..ba579de1b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java
@@ -10,7 +10,7 @@
/** Delete a serving endpoint */
@Generated
public class DeleteServingEndpointRequest {
- /** The name of the serving endpoint. This field is required. */
+ /** */
@JsonIgnore private String name;
public DeleteServingEndpointRequest setName(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
index f00c2f2bb..5e0034ef9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java
@@ -13,7 +13,9 @@
public class EndpointCoreConfigInput {
/**
* Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog.
+ * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints and
+ * for updating existing provisioned throughput endpoints that have never had an inference table
+ * configured; in these cases, please use AI Gateway to manage inference tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigInput autoCaptureConfig;
@@ -21,21 +23,18 @@ public class EndpointCoreConfigInput {
/** The name of the serving endpoint to update. This field is required. */
@JsonIgnore private String name;
- /**
- * A list of served entities for the endpoint to serve. A serving endpoint can have up to 15
- * served entities.
- */
+ /** The list of served entities under the serving endpoint config. */
@JsonProperty("served_entities")
private Collection<ServedEntityInput> servedEntities;
/**
- * (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
- * serving endpoint can have up to 15 served models.
+ * (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+ * config.
*/
@JsonProperty("served_models")
private Collection<ServedModelInput> servedModels;
- /** The traffic config defining how invocations to the serving endpoint should be routed. */
+ /** The traffic configuration associated with the serving endpoint config. */
@JsonProperty("traffic_config")
private TrafficConfig trafficConfig;
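
Per the deprecation note above, inference tables for provisioned throughput endpoints are managed through the AI Gateway instead of auto_capture_config. A sketch, assuming a configured WorkspaceClient and illustrative catalog/schema names:

    WorkspaceClient w = new WorkspaceClient();
    w.servingEndpoints()
        .putAiGateway(
            new PutAiGatewayRequest()
                .setName("my-pt-endpoint")
                .setInferenceTableConfig(
                    new AiGatewayInferenceTableConfig()
                        .setEnabled(true)
                        .setCatalogName("ml")
                        .setSchemaName("gateway_logs")));
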
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
index 695d87d20..253eaba34 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigOutput.java
@@ -12,7 +12,9 @@
public class EndpointCoreConfigOutput {
/**
* Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog.
+ * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints and
+ * for updating existing provisioned throughput endpoints that have never had an inference table
+ * configured; in these cases, please use AI Gateway to manage inference tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigOutput autoCaptureConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
index 6f635ed13..b25e58be5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java
@@ -12,7 +12,9 @@
public class EndpointPendingConfig {
/**
* Configuration for Inference Tables which automatically logs requests and responses to Unity
- * Catalog.
+ * Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints and
+ * for updating existing provisioned throughput endpoints that have never had an inference table
+ * configured; in these cases, please use AI Gateway to manage inference tables.
*/
@JsonProperty("auto_capture_config")
private AutoCaptureConfigOutput autoCaptureConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java
index 0f642b133..16b6905f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointState.java
@@ -13,7 +13,7 @@ public class EndpointState {
* The state of an endpoint's config update. This informs the user if the pending_config is in
* progress, if the update failed, or if there is no update in progress. Note that if the
* endpoint's config_update state value is IN_PROGRESS, another update can not be made until the
- * update completes or fails."
+ * update completes or fails.
*/
@JsonProperty("config_update")
private EndpointStateConfigUpdate configUpdate;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateConfigUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateConfigUpdate.java
index bb7d76acd..c3ea6825c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateConfigUpdate.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateConfigUpdate.java
@@ -4,12 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * The state of an endpoint's config update. This informs the user if the pending_config is in
- * progress, if the update failed, or if there is no update in progress. Note that if the endpoint's
- * config_update state value is IN_PROGRESS, another update can not be made until the update
- * completes or fails."
- */
@Generated
public enum EndpointStateConfigUpdate {
IN_PROGRESS,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateReady.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateReady.java
index 19366dc63..ed5b9cf0a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateReady.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointStateReady.java
@@ -4,11 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is
- * READY if all of the served entities in its active configuration are ready. If any of the actively
- * served entities are in a non-ready state, the endpoint state will be NOT_READY.
- */
@Generated
public enum EndpointStateReady {
NOT_READY,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java
new file mode 100755
index 000000000..1fb13a9b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointTags.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class EndpointTags {
+ /** */
+ @JsonProperty("tags")
+ private Collection<EndpointTag> tags;
+
+ public EndpointTags setTags(Collection<EndpointTag> tags) {
+ this.tags = tags;
+ return this;
+ }
+
+ public Collection<EndpointTag> getTags() {
+ return tags;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EndpointTags that = (EndpointTags) o;
+ return Objects.equals(tags, that.tags);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(tags);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EndpointTags.class).add("tags", tags).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java
new file mode 100755
index 000000000..ab122974a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java
@@ -0,0 +1,123 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Simple Proto message for testing */
+@Generated
+public class ExternalFunctionRequest {
+ /** The connection name to use. This is required to identify the external connection. */
+ @JsonProperty("connection_name")
+ private String connectionName;
+
+ /**
+ * Additional headers for the request. If not provided, only auth headers from connections would
+ * be passed.
+ */
+ @JsonProperty("headers")
+ private String headers;
+
+ /** The JSON payload to send in the request body. */
+ @JsonProperty("json")
+ private String json;
+
+ /** The HTTP method to use (e.g., 'GET', 'POST'). */
+ @JsonProperty("method")
+ private ExternalFunctionRequestHttpMethod method;
+
+ /** Query parameters for the request. */
+ @JsonProperty("params")
+ private String params;
+
+ /** The relative path for the API endpoint. This is required. */
+ @JsonProperty("path")
+ private String path;
+
+ public ExternalFunctionRequest setConnectionName(String connectionName) {
+ this.connectionName = connectionName;
+ return this;
+ }
+
+ public String getConnectionName() {
+ return connectionName;
+ }
+
+ public ExternalFunctionRequest setHeaders(String headers) {
+ this.headers = headers;
+ return this;
+ }
+
+ public String getHeaders() {
+ return headers;
+ }
+
+ public ExternalFunctionRequest setJson(String json) {
+ this.json = json;
+ return this;
+ }
+
+ public String getJson() {
+ return json;
+ }
+
+ public ExternalFunctionRequest setMethod(ExternalFunctionRequestHttpMethod method) {
+ this.method = method;
+ return this;
+ }
+
+ public ExternalFunctionRequestHttpMethod getMethod() {
+ return method;
+ }
+
+ public ExternalFunctionRequest setParams(String params) {
+ this.params = params;
+ return this;
+ }
+
+ public String getParams() {
+ return params;
+ }
+
+ public ExternalFunctionRequest setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExternalFunctionRequest that = (ExternalFunctionRequest) o;
+ return Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(headers, that.headers)
+ && Objects.equals(json, that.json)
+ && Objects.equals(method, that.method)
+ && Objects.equals(params, that.params)
+ && Objects.equals(path, that.path);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(connectionName, headers, json, method, params, path);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExternalFunctionRequest.class)
+ .add("connectionName", connectionName)
+ .add("headers", headers)
+ .add("json", json)
+ .add("method", method)
+ .add("params", params)
+ .add("path", path)
+ .toString();
+ }
+}
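
A sketch of building the new pass-through request (connection name, path, and payload are illustrative; per the field docs, only the connection's auth headers are sent unless headers is set):

    ExternalFunctionRequest req =
        new ExternalFunctionRequest()
            .setConnectionName("my_uc_connection")
            .setMethod(ExternalFunctionRequestHttpMethod.POST)
            .setPath("/v1/messages")
            .setHeaders("{\"Content-Type\": \"application/json\"}")
            .setJson("{\"prompt\": \"hello\"}");
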
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestHttpMethod.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestHttpMethod.java
new file mode 100755
index 000000000..df660e2d1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequestHttpMethod.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ExternalFunctionRequestHttpMethod {
+ DELETE,
+ GET,
+ PATCH,
+ POST,
+ PUT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java
index acd14f7fc..f33517662 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java
@@ -50,7 +50,7 @@ public class ExternalModel {
/**
* The name of the provider for the external model. Currently, the supported providers are
* 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
- * 'google-cloud-vertex-ai', 'openai', and 'palm'.",
+ * 'google-cloud-vertex-ai', 'openai', and 'palm'.
*/
@JsonProperty("provider")
private ExternalModelProvider provider;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java
index a08a69fb7..0d3553cb6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java
@@ -5,11 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * The name of the provider for the external model. Currently, the supported providers are
- * 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
- * 'google-cloud-vertex-ai', 'openai', and 'palm'.",
- */
@Generated
public enum ExternalModelProvider {
@JsonProperty("ai21labs")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java
index 88ae78514..c5a818bcd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java
@@ -7,21 +7,25 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * None of these fields are sensitive, as they are hard-coded in the system and made available to
+ * customers.
+ */
@Generated
public class FoundationModel {
- /** The description of the foundation model. */
+ /** */
@JsonProperty("description")
private String description;
- /** The display name of the foundation model. */
+ /** */
@JsonProperty("display_name")
private String displayName;
- /** The URL to the documentation of the foundation model. */
+ /** */
@JsonProperty("docs")
private String docs;
- /** The name of the foundation model. */
+ /** */
@JsonProperty("name")
private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java
index 9b5f37612..6512b0530 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java
@@ -4,29 +4,39 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.io.InputStream;
import java.util.Objects;
-/**
- * The response is an OpenAPI spec in JSON format that typically includes fields like openapi, info,
- * servers and paths, etc.
- */
@Generated
public class GetOpenApiResponse {
+ /** */
+ @JsonIgnore private InputStream contents;
+
+ public GetOpenApiResponse setContents(InputStream contents) {
+ this.contents = contents;
+ return this;
+ }
+
+ public InputStream getContents() {
+ return contents;
+ }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- return true;
+ GetOpenApiResponse that = (GetOpenApiResponse) o;
+ return Objects.equals(contents, that.contents);
}
@Override
public int hashCode() {
- return Objects.hash();
+ return Objects.hash(contents);
}
@Override
public String toString() {
- return new ToStringer(GetOpenApiResponse.class).toString();
+ return new ToStringer(GetOpenApiResponse.class).add("contents", contents).toString();
}
}
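
With contents now exposed as a stream instead of a fire-and-forget call, callers can drain the spec themselves. A sketch, assuming a configured WorkspaceClient, an existing endpoint name, and Java 9+ for InputStream.readAllBytes:

    WorkspaceClient w = new WorkspaceClient();
    try (InputStream spec = w.servingEndpoints().getOpenApi("my-endpoint").getContents()) {
      System.out.println(new String(spec.readAllBytes(), StandardCharsets.UTF_8));
    }
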
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java
new file mode 100755
index 000000000..57e459ca3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/HttpRequestResponse.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.io.InputStream;
+import java.util.Objects;
+
+@Generated
+public class HttpRequestResponse {
+ /** */
+ @JsonIgnore private InputStream contents;
+
+ public HttpRequestResponse setContents(InputStream contents) {
+ this.contents = contents;
+ return this;
+ }
+
+ public InputStream getContents() {
+ return contents;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ HttpRequestResponse that = (HttpRequestResponse) o;
+ return Objects.equals(contents, that.contents);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(contents);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(HttpRequestResponse.class).add("contents", contents).toString();
+ }
+}
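
The same streaming pattern applies to the external-function response. A sketch reusing the one-shot overload on ServingEndpointsAPI (connection and path are illustrative):

    WorkspaceClient w = new WorkspaceClient();
    HttpRequestResponse resp =
        w.servingEndpoints()
            .httpRequest("my_uc_connection", ExternalFunctionRequestHttpMethod.GET, "/health");
    try (InputStream body = resp.getContents()) {
      System.out.println(new String(body.readAllBytes(), StandardCharsets.UTF_8));
    }
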
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java
index 8a77e0b2a..354f3eae3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ModelDataPlaneInfo.java
@@ -7,6 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * A representation of all DataPlaneInfo for operations that can be done on a model through Data
+ * Plane APIs.
+ */
@Generated
public class ModelDataPlaneInfo {
/** Information required to query DataPlane API 'query' endpoint. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java
index 86c37887e..d7319fc73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfig.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Configs needed to create an OpenAI model route. */
@Generated
public class OpenAiConfig {
/** This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java
index c5d44536b..500d98298 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java
@@ -9,15 +9,15 @@
@Generated
public class PayloadTable {
- /** The name of the payload table. */
+ /** */
@JsonProperty("name")
private String name;
- /** The status of the payload table. */
+ /** */
@JsonProperty("status")
private String status;
- /** The status message of the payload table. */
+ /** */
@JsonProperty("status_message")
private String statusMessage;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java
index 5413161e6..84d652b28 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java
@@ -9,7 +9,6 @@
import java.util.Collection;
import java.util.Objects;
-/** Update AI Gateway of a serving endpoint */
@Generated
public class PutAiGatewayRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java
index 468a44869..021c964c7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java
@@ -19,7 +19,7 @@ public class PutAiGatewayResponse {
/**
* Configuration for payload logging using inference tables. Use these tables to monitor and audit
- * data being sent to and received from model APIs and to improve model quality .
+ * data being sent to and received from model APIs and to improve model quality.
*/
@JsonProperty("inference_table_config")
private AiGatewayInferenceTableConfig inferenceTableConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java
index 8e28b84b3..f8cf5eb23 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java
@@ -9,7 +9,6 @@
import java.util.Collection;
import java.util.Objects;
-/** Update rate limits of a serving endpoint */
@Generated
public class PutRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitKey.java
index b34de5c4f..94d9adc69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitKey.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitKey.java
@@ -5,10 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported,
- * with 'endpoint' being the default if not specified.
- */
@Generated
public enum RateLimitKey {
@JsonProperty("endpoint")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitRenewalPeriod.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitRenewalPeriod.java
index a3bdabb7e..56608ff87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitRenewalPeriod.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/RateLimitRenewalPeriod.java
@@ -5,9 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
- */
@Generated
public enum RateLimitRenewalPeriod {
@JsonProperty("minute")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
index 70e694da5..c0d50fa56 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
@@ -14,15 +14,12 @@ public class ServedEntityInput {
* The name of the entity to be served. The entity may be a model in the Databricks Model
* Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If
* it is a UC object, the full name of the object should be given in the form of
- * __catalog_name__.__schema_name__.__model_name__.
+ * **catalog_name.schema_name.model_name**.
*/
@JsonProperty("entity_name")
private String entityName;
- /**
- * The version of the model in Databricks Model Registry to be served or empty if the entity is a
- * FEATURE_SPEC.
- */
+ /** */
@JsonProperty("entity_version")
private String entityVersion;
@@ -63,7 +60,7 @@ public class ServedEntityInput {
* The name of a served entity. It must be unique across an endpoint. A served entity name can
* consist of alphanumeric characters, dashes, and underscores. If not specified for an external
* model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
- * not specified for other entities, it defaults to -.
+ * not specified for other entities, it defaults to entity_name-entity_version.
*/
@JsonProperty("name")
private String name;
@@ -90,10 +87,10 @@ public class ServedEntityInput {
* available [GPU types].
*
* [GPU types]:
- * https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
+ * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
*/
@JsonProperty("workload_type")
- private String workloadType;
+ private ServingModelWorkloadType workloadType;
public ServedEntityInput setEntityName(String entityName) {
this.entityName = entityName;
@@ -185,12 +182,12 @@ public String getWorkloadSize() {
return workloadSize;
}
- public ServedEntityInput setWorkloadType(String workloadType) {
+ public ServedEntityInput setWorkloadType(ServingModelWorkloadType workloadType) {
this.workloadType = workloadType;
return this;
}
- public String getWorkloadType() {
+ public ServingModelWorkloadType getWorkloadType() {
return workloadType;
}
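
With workload_type now strongly typed, GPU selection is checked at compile time instead of being a free-form string. A sketch (entity name/version are illustrative, and ServingModelWorkloadType is assumed to expose constants such as GPU_SMALL matching the documented GPU workload types):

    ServedEntityInput entity =
        new ServedEntityInput()
            .setEntityName("ml.models.my_model")
            .setEntityVersion("1")
            .setWorkloadSize("Small")
            .setWorkloadType(ServingModelWorkloadType.GPU_SMALL)
            .setScaleToZeroEnabled(true);
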
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
index 33b3779e4..270d02af2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
@@ -10,27 +10,24 @@
@Generated
public class ServedEntityOutput {
- /** The creation timestamp of the served entity in Unix time. */
+ /** */
@JsonProperty("creation_timestamp")
private Long creationTimestamp;
- /** The email of the user who created the served entity. */
+ /** */
@JsonProperty("creator")
private String creator;
/**
- * The name of the entity served. The entity may be a model in the Databricks Model Registry, a
- * model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
- * object, the full name of the object is given in the form of
- * __catalog_name__.__schema_name__.__model_name__.
+ * The name of the entity to be served. The entity may be a model in the Databricks Model
+ * Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If
+ * it is a UC object, the full name of the object should be given in the form of
+ * **catalog_name.schema_name.model_name**.
*/
@JsonProperty("entity_name")
private String entityName;
- /**
- * The version of the served entity in Databricks Model Registry or empty if the entity is a
- * FEATURE_SPEC.
- */
+ /** */
@JsonProperty("entity_version")
private String entityVersion;
@@ -44,17 +41,20 @@ public class ServedEntityOutput {
private Map<String, String> environmentVars;
/**
- * The external model that is served. NOTE: Only one of external_model, foundation_model, and
- * (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
- * returned based on the endpoint type.
+ * The external model to be served. NOTE: Only one of external_model and (entity_name,
+ * entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with
+ * the latter set being used for custom model serving for a Databricks registered model. For an
+ * existing endpoint with external_model, it cannot be updated to an endpoint without
+ * external_model. If the endpoint is created without external_model, users cannot update it to
+ * add external_model later. The task type of all external models within an endpoint must be the
+ * same.
*/
@JsonProperty("external_model")
private ExternalModel externalModel;
/**
- * The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
- * (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
- * returned based on the endpoint type.
+ * None of these fields are sensitive, as they are hard-coded in the system and made available to
+ * customers.
*/
@JsonProperty("foundation_model")
private FoundationModel foundationModel;
@@ -71,7 +71,12 @@ public class ServedEntityOutput {
@JsonProperty("min_provisioned_throughput")
private Long minProvisionedThroughput;
- /** The name of the served entity. */
+ /**
+ * The name of a served entity. It must be unique across an endpoint. A served entity name can
+ * consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+ * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+ * not specified for other entities, it defaults to entity_name-entity_version.
+ */
@JsonProperty("name")
private String name;
@@ -79,7 +84,7 @@ public class ServedEntityOutput {
@JsonProperty("scale_to_zero_enabled")
private Boolean scaleToZeroEnabled;
- /** Information corresponding to the state of the served entity. */
+ /** */
@JsonProperty("state")
private ServedModelState state;
@@ -89,7 +94,7 @@ public class ServedEntityOutput {
* process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
* concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size will be 0.
+ * each workload size is 0.
*/
@JsonProperty("workload_size")
private String workloadSize;
@@ -101,10 +106,10 @@ public class ServedEntityOutput {
* available [GPU types].
*
* [GPU types]:
- * https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
+ * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
*/
@JsonProperty("workload_type")
- private String workloadType;
+ private ServingModelWorkloadType workloadType;
public ServedEntityOutput setCreationTimestamp(Long creationTimestamp) {
this.creationTimestamp = creationTimestamp;
@@ -232,12 +237,12 @@ public String getWorkloadSize() {
return workloadSize;
}
- public ServedEntityOutput setWorkloadType(String workloadType) {
+ public ServedEntityOutput setWorkloadType(ServingModelWorkloadType workloadType) {
this.workloadType = workloadType;
return this;
}
- public String getWorkloadType() {
+ public ServingModelWorkloadType getWorkloadType() {
return workloadType;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
index cb987bb76..8ed57eb23 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
@@ -9,37 +9,26 @@
@Generated
public class ServedEntitySpec {
- /**
- * The name of the entity served. The entity may be a model in the Databricks Model Registry, a
- * model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
- * object, the full name of the object is given in the form of
- * __catalog_name__.__schema_name__.__model_name__.
- */
+ /** */
@JsonProperty("entity_name")
private String entityName;
- /**
- * The version of the served entity in Databricks Model Registry or empty if the entity is a
- * FEATURE_SPEC.
- */
+ /** */
@JsonProperty("entity_version")
private String entityVersion;
- /**
- * The external model that is served. NOTE: Only one of external_model, foundation_model, and
- * (entity_name, entity_version) is returned based on the endpoint type.
- */
+ /** */
@JsonProperty("external_model")
private ExternalModel externalModel;
/**
- * The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
- * (entity_name, entity_version) is returned based on the endpoint type.
+ * None of these fields are sensitive, as they are hard-coded in the system and made available to
+ * customers.
*/
@JsonProperty("foundation_model")
private FoundationModel foundationModel;
- /** The name of the served entity. */
+ /** */
@JsonProperty("name")
private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
index 6f4ab9ba6..4cdd5876e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
@@ -12,14 +12,14 @@
public class ServedModelInput {
/**
* An object containing a set of optional, user-specified environment variable key-value pairs
- * used for serving this model. Note: this is an experimental feature and subject to change.
- * Example model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+ * used for serving this entity. Note: this is an experimental feature and subject to change.
+ * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
* "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
*/
@JsonProperty("environment_vars")
private Map<String, String> environmentVars;
- /** ARN of the instance profile that the served model will use to access AWS resources. */
+ /** ARN of the instance profile that the served entity uses to access AWS resources. */
@JsonProperty("instance_profile_arn")
private String instanceProfileArn;
@@ -31,49 +31,46 @@ public class ServedModelInput {
@JsonProperty("min_provisioned_throughput")
private Long minProvisionedThroughput;
- /**
- * The name of the model in Databricks Model Registry to be served or if the model resides in
- * Unity Catalog, the full name of model, in the form of
- * __catalog_name__.__schema_name__.__model_name__.
- */
+ /** */
@JsonProperty("model_name")
private String modelName;
- /** The version of the model in Databricks Model Registry or Unity Catalog to be served. */
+ /** */
@JsonProperty("model_version")
private String modelVersion;
/**
- * The name of a served model. It must be unique across an endpoint. If not specified, this field
- * will default to -. A served model name can consist of alphanumeric
- * characters, dashes, and underscores.
+ * The name of a served entity. It must be unique across an endpoint. A served entity name can
+ * consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+ * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+ * not specified for other entities, it defaults to entity_name-entity_version.
*/
@JsonProperty("name")
private String name;
- /** Whether the compute resources for the served model should scale down to zero. */
+ /** Whether the compute resources for the served entity should scale down to zero. */
@JsonProperty("scale_to_zero_enabled")
private Boolean scaleToZeroEnabled;
/**
- * The workload size of the served model. The workload size corresponds to a range of provisioned
- * concurrency that the compute will autoscale between. A single unit of provisioned concurrency
- * can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
+ * The workload size of the served entity. The workload size corresponds to a range of provisioned
+ * concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+ * process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
* concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size will be 0.
+ * each workload size is 0.
*/
@JsonProperty("workload_size")
private ServedModelInputWorkloadSize workloadSize;
/**
- * The workload type of the served model. The workload type selects which type of compute to use
+ * The workload type of the served entity. The workload type selects which type of compute to use
* in the endpoint. The default value for this parameter is "CPU". For deep learning workloads,
* GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the
* available [GPU types].
*
* [GPU types]:
- * https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
+ * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
*/
@JsonProperty("workload_type")
private ServedModelInputWorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java
index b85510254..db3122951 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java
@@ -5,14 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * The workload size of the served model. The workload size corresponds to a range of provisioned
- * concurrency that the compute will autoscale between. A single unit of provisioned concurrency can
- * process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
- * "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
- * scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
- * will be 0.
- */
@Generated
public enum ServedModelInputWorkloadSize {
@JsonProperty("Large")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java
index 049c32eee..64e5315e9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java
@@ -4,15 +4,6 @@
import com.databricks.sdk.support.Generated;
-/**
- * The workload type of the served model. The workload type selects which type of compute to use in
- * the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
- * acceleration is available by selecting workload types like GPU_SMALL and others. See the
- * available [GPU types].
- *
- * [GPU types]:
- * https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
- */
@Generated
public enum ServedModelInputWorkloadType {
CPU,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
index 332a80e44..36b67562d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
@@ -10,72 +10,74 @@
@Generated
public class ServedModelOutput {
- /** The creation timestamp of the served model in Unix time. */
+ /** */
@JsonProperty("creation_timestamp")
private Long creationTimestamp;
- /** The email of the user who created the served model. */
+ /** */
@JsonProperty("creator")
private String creator;
/**
* An object containing a set of optional, user-specified environment variable key-value pairs
- * used for serving this model. Note: this is an experimental feature and subject to change.
- * Example model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+ * used for serving this entity. Note: this is an experimental feature and subject to change.
+ * Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
* "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
*/
@JsonProperty("environment_vars")
private Map<String, String> environmentVars;
- /** ARN of the instance profile that the served model will use to access AWS resources. */
+ /** ARN of the instance profile that the served entity uses to access AWS resources. */
@JsonProperty("instance_profile_arn")
private String instanceProfileArn;
- /**
- * The name of the model in Databricks Model Registry or the full name of the model in Unity
- * Catalog.
- */
+ /** */
@JsonProperty("model_name")
private String modelName;
- /** The version of the model in Databricks Model Registry or Unity Catalog to be served. */
+ /** */
@JsonProperty("model_version")
private String modelVersion;
- /** The name of the served model. */
+ /**
+ * The name of a served entity. It must be unique across an endpoint. A served entity name can
+ * consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+ * model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+ * not specified for other entities, it defaults to entity_name-entity_version.
+ */
@JsonProperty("name")
private String name;
- /** Whether the compute resources for the Served Model should scale down to zero. */
+ /** Whether the compute resources for the served entity should scale down to zero. */
@JsonProperty("scale_to_zero_enabled")
private Boolean scaleToZeroEnabled;
- /** Information corresponding to the state of the Served Model. */
+ /** */
@JsonProperty("state")
private ServedModelState state;
/**
- * The workload size of the served model. The workload size corresponds to a range of provisioned
- * concurrency that the compute will autoscale between. A single unit of provisioned concurrency
- * can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
+ * The workload size of the served entity. The workload size corresponds to a range of provisioned
+ * concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+ * process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
* concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size will be 0.
+ * each workload size is 0.
*/
@JsonProperty("workload_size")
private String workloadSize;
/**
- * The workload type of the served model. The workload type selects which type of compute to use
+ * The workload type of the served entity. The workload type selects which type of compute to use
* in the endpoint. The default value for this parameter is "CPU". For deep learning workloads,
* GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the
* available [GPU types].
*
* [GPU types]:
- * https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
+ * https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
*/
@JsonProperty("workload_type")
- private String workloadType;
+ private ServingModelWorkloadType workloadType;
public ServedModelOutput setCreationTimestamp(Long creationTimestamp) {
this.creationTimestamp = creationTimestamp;
@@ -167,12 +169,12 @@ public String getWorkloadSize() {
return workloadSize;
}
- public ServedModelOutput setWorkloadType(String workloadType) {
+ public ServedModelOutput setWorkloadType(ServingModelWorkloadType workloadType) {
this.workloadType = workloadType;
return this;
}
- public String getWorkloadType() {
+ public ServingModelWorkloadType getWorkloadType() {
return workloadType;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java
index 4a6c61b46..233618bd9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java
@@ -9,18 +9,15 @@
@Generated
public class ServedModelSpec {
- /**
- * The name of the model in Databricks Model Registry or the full name of the model in Unity
- * Catalog.
- */
+ /** Only one of model_name and entity_name should be populated */
@JsonProperty("model_name")
private String modelName;
- /** The version of the model in Databricks Model Registry or Unity Catalog to be served. */
+ /** Only one of model_version and entity_version should be populated */
@JsonProperty("model_version")
private String modelVersion;
- /** The name of the served model. */
+ /** */
@JsonProperty("name")
private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java
index d5063904f..4c71f5360 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelState.java
@@ -9,21 +9,11 @@
@Generated
public class ServedModelState {
- /**
- * The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
- * is not ready yet because the deployment is still being created (i.e container image is
- * building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates
- * that the served entity was previously in a ready state but no longer is and is attempting to
- * recover. DEPLOYMENT_READY indicates that the served entity is ready to receive traffic.
- * DEPLOYMENT_FAILED indicates that there was an error trying to bring up the served entity (e.g
- * container image build failed, the model server failed to start due to a model loading error,
- * etc.) DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure
- * in bringing up another served entity under the same endpoint and config version.
- */
+ /** */
@JsonProperty("deployment")
private ServedModelStateDeployment deployment;
- /** More information about the state of the served entity, if available. */
+ /** */
@JsonProperty("deployment_state_message")
private String deploymentStateMessage;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStateDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStateDeployment.java
index 1b588a45b..e8d32a02a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStateDeployment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelStateDeployment.java
@@ -5,17 +5,6 @@
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.annotation.JsonProperty;
-/**
- * The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
- * is not ready yet because the deployment is still being created (i.e container image is building,
- * model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the
- * served entity was previously in a ready state but no longer is and is attempting to recover.
- * DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED
- * indicates that there was an error trying to bring up the served entity (e.g container image build
- * failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED
- * indicates that the deployment was terminated likely due to a failure in bringing up another
- * served entity under the same endpoint and config version.
- */
@Generated
public enum ServedModelStateDeployment {
@JsonProperty("DEPLOYMENT_ABORTED")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java
index fe0a79c34..391d622e7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpoint.java
@@ -11,8 +11,8 @@
@Generated
public class ServingEndpoint {
/**
- * The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
- * currently supported.
+ * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and
+ * provisioned throughput endpoints are currently supported.
*/
@JsonProperty("ai_gateway")
private AiGatewayConfig aiGateway;
@@ -29,10 +29,7 @@ public class ServingEndpoint {
@JsonProperty("creator")
private String creator;
- /**
- * System-generated ID of the endpoint. This is used to refer to the endpoint in the Permissions
- * API
- */
+ /** System-generated ID of the endpoint, included to be used by the Permissions API. */
@JsonProperty("id")
private String id;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java
index da3d66e63..a0cc5a1c4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailed.java
@@ -11,8 +11,8 @@
@Generated
public class ServingEndpointDetailed {
/**
- * The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
- * currently supported.
+ * The AI Gateway configuration for the serving endpoint. NOTE: Only external model and
+ * provisioned throughput endpoints are currently supported.
*/
@JsonProperty("ai_gateway")
private AiGatewayConfig aiGateway;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPermissionLevel.java
index edb188345..6104d8301 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPermissionLevel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointDetailedPermissionLevel.java
@@ -4,7 +4,6 @@
import com.databricks.sdk.support.Generated;
-/** The permission level of the principal making the request. */
@Generated
public enum ServingEndpointDetailedPermissionLevel {
CAN_MANAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
index 37c8b27b2..3e7e3347c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
@@ -104,9 +104,8 @@ public BuildLogsResponse buildLogs(BuildLogsRequest request) {
return impl.buildLogs(request);
}
- public Wait<ServingEndpointDetailed, ServingEndpointDetailed> create(
- String name, EndpointCoreConfigInput config) {
- return create(new CreateServingEndpoint().setName(name).setConfig(config));
+ public Wait<ServingEndpointDetailed, ServingEndpointDetailed> create(String name) {
+ return create(new CreateServingEndpoint().setName(name));
}
/** Create a new serving endpoint. */
@@ -155,8 +154,8 @@ public ServingEndpointDetailed get(GetServingEndpointRequest request) {
return impl.get(request);
}
- public void getOpenApi(String name) {
- getOpenApi(new GetOpenApiRequest().setName(name));
+ public GetOpenApiResponse getOpenApi(String name) {
+ return getOpenApi(new GetOpenApiRequest().setName(name));
}
/**
@@ -165,8 +164,8 @@ public void getOpenApi(String name) {
* Get the query schema of the serving endpoint in OpenAPI format. The schema contains
* information for the supported paths, input and output format and datatypes.
*/
- public void getOpenApi(GetOpenApiRequest request) {
- impl.getOpenApi(request);
+ public GetOpenApiResponse getOpenApi(GetOpenApiRequest request) {
+ return impl.getOpenApi(request);
}
public GetServingEndpointPermissionLevelsResponse getPermissionLevels(String servingEndpointId) {
@@ -199,6 +198,20 @@ public ServingEndpointPermissions getPermissions(GetServingEndpointPermissionsRe
return impl.getPermissions(request);
}
+ public HttpRequestResponse httpRequest(
+ String connectionName, ExternalFunctionRequestHttpMethod method, String path) {
+ return httpRequest(
+ new ExternalFunctionRequest()
+ .setConnectionName(connectionName)
+ .setMethod(method)
+ .setPath(path));
+ }
+
+ /** Make calls to external services using the credentials stored in a UC Connection. */
+ public HttpRequestResponse httpRequest(ExternalFunctionRequest request) {
+ return impl.httpRequest(request);
+ }
+
/** Get all serving endpoints. */
public Iterable<ServingEndpoint> list() {
return new Paginator<>(
@@ -218,7 +231,7 @@ public ServerLogsResponse logs(LogsRequest request) {
return impl.logs(request);
}
- public Iterable<EndpointTag> patch(String name) {
+ public EndpointTags patch(String name) {
return patch(new PatchServingEndpointTags().setName(name));
}
@@ -227,7 +240,7 @@ public Iterable<EndpointTag> patch(String name) {
*
* Used to batch add and delete tags from a serving endpoint with a single API call.
*/
- public Iterable<EndpointTag> patch(PatchServingEndpointTags request) {
+ public EndpointTags patch(PatchServingEndpointTags request) {
return impl.patch(request);
}
@@ -252,8 +265,8 @@ public PutAiGatewayResponse putAiGateway(String name) {
/**
* Update AI Gateway of a serving endpoint.
*
- * Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are
- * currently supported.
+ * Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and
+ * provisioned throughput endpoints are currently supported.
*/
public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) {
return impl.putAiGateway(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
index 1f868b724..5a32ebcbc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
@@ -6,7 +6,6 @@
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.support.Generated;
import java.io.IOException;
-import java.util.Collection;
/** Package-local implementation of ServingEndpoints */
@Generated
@@ -87,13 +86,13 @@ public ServingEndpointDetailed get(GetServingEndpointRequest request) {
}
@Override
- public void getOpenApi(GetOpenApiRequest request) {
+ public GetOpenApiResponse getOpenApi(GetOpenApiRequest request) {
String path = String.format("/api/2.0/serving-endpoints/%s/openapi", request.getName());
try {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- apiClient.execute(req, GetOpenApiResponse.class);
+ req.withHeader("Accept", "text/plain");
+ return apiClient.execute(req, GetOpenApiResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -130,6 +129,20 @@ public ServingEndpointPermissions getPermissions(GetServingEndpointPermissionsRe
}
}
+ @Override
+ public HttpRequestResponse httpRequest(ExternalFunctionRequest request) {
+ String path = "/api/2.0/external-function";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "text/plain");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, HttpRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ListEndpointsResponse list() {
String path = "/api/2.0/serving-endpoints";
@@ -159,12 +172,17 @@ public ServerLogsResponse logs(LogsRequest request) {
}
@Override
- public Collection<EndpointTag> patch(PatchServingEndpointTags request) {
+ public EndpointTags patch(PatchServingEndpointTags request) {
String path = String.format("/api/2.0/serving-endpoints/%s/tags", request.getName());
- Request req = new Request("GET", path);
- req.withHeader("Accept", "application/json");
- req.withHeader("Content-Type", "application/json");
- return apiClient.getCollection(req, EndpointTag.class);
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, EndpointTags.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
}
@Override
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
index 5a42d11ce..a6c95cedd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
@@ -2,7 +2,6 @@
package com.databricks.sdk.service.serving;
import com.databricks.sdk.support.Generated;
-import java.util.Collection;
/**
* The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
@@ -56,7 +55,7 @@ public interface ServingEndpointsService {
* Get the query schema of the serving endpoint in OpenAPI format. The schema contains
* information for the supported paths, input and output format and datatypes.
*/
- void getOpenApi(GetOpenApiRequest getOpenApiRequest);
+ GetOpenApiResponse getOpenApi(GetOpenApiRequest getOpenApiRequest);
/**
* Get serving endpoint permission levels.
@@ -75,6 +74,9 @@ GetServingEndpointPermissionLevelsResponse getPermissionLevels(
ServingEndpointPermissions getPermissions(
GetServingEndpointPermissionsRequest getServingEndpointPermissionsRequest);
+ /** Make an external service call using the credentials stored in a UC Connection. */
+ HttpRequestResponse httpRequest(ExternalFunctionRequest externalFunctionRequest);
+
/** Get all serving endpoints. */
ListEndpointsResponse list();
@@ -90,7 +92,7 @@ ServingEndpointPermissions getPermissions(
*
* <p>Used to batch add and delete tags from a serving endpoint with a single API call.
*/
- Collection<EndpointTag> patch(PatchServingEndpointTags patchServingEndpointTags);
+ EndpointTags patch(PatchServingEndpointTags patchServingEndpointTags);
/**
* Update rate limits of a serving endpoint.
@@ -103,8 +105,8 @@ ServingEndpointPermissions getPermissions(
/**
* Update AI Gateway of a serving endpoint.
*
- * <p>Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are
- * currently supported.
+ * <p>Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and
+ * provisioned throughput endpoints are currently supported.
*/
PutAiGatewayResponse putAiGateway(PutAiGatewayRequest putAiGatewayRequest);
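Because the Service interface gained a method and changed two return types, downstream test doubles need updating. A sketch using the API class's constructor-for-mocks pattern, assuming Mockito is available on the test classpath:

```java
import static org.mockito.Mockito.*;

import com.databricks.sdk.service.serving.ExternalFunctionRequest;
import com.databricks.sdk.service.serving.ExternalFunctionRequestHttpMethod;
import com.databricks.sdk.service.serving.HttpRequestResponse;
import com.databricks.sdk.service.serving.ServingEndpointsAPI;
import com.databricks.sdk.service.serving.ServingEndpointsService;

public class ServingEndpointsStubSketch {
  public static void main(String[] args) {
    // Stub the Service and hand it to the API via the "constructor for mocks".
    ServingEndpointsService svc = mock(ServingEndpointsService.class);
    when(svc.httpRequest(any(ExternalFunctionRequest.class)))
        .thenReturn(new HttpRequestResponse());

    ServingEndpointsAPI api = new ServingEndpointsAPI(svc);
    api.httpRequest("conn", ExternalFunctionRequestHttpMethod.POST, "/v1/ping");

    // The request built by the convenience overload reaches the stubbed Service.
    verify(svc).httpRequest(any(ExternalFunctionRequest.class));
  }
}
```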
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java
new file mode 100755
index 000000000..7c67cd562
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ServingModelWorkloadType {
+ CPU,
+ GPU_LARGE,
+ GPU_MEDIUM,
+ GPU_SMALL,
+ MULTIGPU_MEDIUM,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java
new file mode 100755
index 000000000..4b2251f62
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessEnable.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AccountIpAccessEnable {
+ /** */
+ @JsonProperty("acct_ip_acl_enable")
+ private BooleanMessage acctIpAclEnable;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public AccountIpAccessEnable setAcctIpAclEnable(BooleanMessage acctIpAclEnable) {
+ this.acctIpAclEnable = acctIpAclEnable;
+ return this;
+ }
+
+ public BooleanMessage getAcctIpAclEnable() {
+ return acctIpAclEnable;
+ }
+
+ public AccountIpAccessEnable setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public AccountIpAccessEnable setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccountIpAccessEnable that = (AccountIpAccessEnable) o;
+ return Objects.equals(acctIpAclEnable, that.acctIpAclEnable)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(acctIpAclEnable, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccountIpAccessEnable.class)
+ .add("acctIpAclEnable", acctIpAclEnable)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
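A minimal sketch of building the new setting payload with the generated fluent setters, assuming `BooleanMessage.setValue` follows the existing settings message types:

```java
import com.databricks.sdk.service.settings.AccountIpAccessEnable;
import com.databricks.sdk.service.settings.BooleanMessage;

public class AccountIpAccessEnableSketch {
  public static void main(String[] args) {
    // setting_name must be "default" for single-instance settings per the field docs.
    AccountIpAccessEnable setting =
        new AccountIpAccessEnable()
            .setAcctIpAclEnable(new BooleanMessage().setValue(true))
            .setSettingName("default");
    System.out.println(setting);
  }
}
```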
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
index ff93f6cf9..921d60bee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
@@ -17,6 +17,8 @@ public class AccountSettingsAPI {
private DisableLegacyFeaturesAPI disableLegacyFeaturesAPI;
+ private EnableIpAccessListsAPI enableIpAccessListsAPI;
+
private EsmEnablementAccountAPI esmEnablementAccountAPI;
private PersonalComputeAPI personalComputeAPI;
@@ -29,6 +31,8 @@ public AccountSettingsAPI(ApiClient apiClient) {
disableLegacyFeaturesAPI = new DisableLegacyFeaturesAPI(apiClient);
+ enableIpAccessListsAPI = new EnableIpAccessListsAPI(apiClient);
+
esmEnablementAccountAPI = new EsmEnablementAccountAPI(apiClient);
personalComputeAPI = new PersonalComputeAPI(apiClient);
@@ -52,6 +56,11 @@ public DisableLegacyFeaturesAPI DisableLegacyFeatures() {
return disableLegacyFeaturesAPI;
}
+ /** Controls the enforcement of IP access lists for accessing the account console. */
+ public EnableIpAccessListsAPI EnableIpAccessLists() {
+ return enableIpAccessListsAPI;
+ }
+
/**
* The enhanced security monitoring setting at the account level controls whether to enable the
* feature on new workspaces.
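A sketch of reaching the new accessor, assuming the standard `settings()` entry point on `AccountClient`:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.AccountIpAccessEnable;
import com.databricks.sdk.service.settings.GetAccountIpAccessEnableRequest;

public class EnableIpAccessListsReadSketch {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    // Read the current account-console IP ACL toggle through the new accessor.
    AccountIpAccessEnable current =
        a.settings().EnableIpAccessLists().get(new GetAccountIpAccessEnableRequest());
    System.out.println(current.getAcctIpAclEnable());
  }
}
```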
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java
index 7cf507e15..cd86c2704 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceStandard.java
@@ -13,7 +13,9 @@ public enum ComplianceStandard {
FEDRAMP_IL5,
FEDRAMP_MODERATE,
HIPAA,
+ HITRUST,
IRAP_PROTECTED,
+ ISMAP,
ITAR_EAR,
NONE,
PCI_DSS,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java
new file mode 100755
index 000000000..ef3df304b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete the account IP access toggle setting */
+@Generated
+public class DeleteAccountIpAccessEnableRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteAccountIpAccessEnableRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAccountIpAccessEnableRequest that = (DeleteAccountIpAccessEnableRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAccountIpAccessEnableRequest.class).add("etag", etag).toString();
+ }
+}
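The etag Javadoc above prescribes a read -> delete pattern; a sketch of that flow using the `EnableIpAccessListsAPI` introduced later in this diff:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.DeleteAccountIpAccessEnableRequest;
import com.databricks.sdk.service.settings.DeleteAccountIpAccessEnableResponse;
import com.databricks.sdk.service.settings.GetAccountIpAccessEnableRequest;

public class EtagReadThenDeleteSketch {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    // Fetch a fresh etag with GET, then pass it to DELETE so a concurrent
    // writer's newer version is not silently reverted.
    String etag =
        a.settings().EnableIpAccessLists().get(new GetAccountIpAccessEnableRequest()).getEtag();
    DeleteAccountIpAccessEnableResponse deleted =
        a.settings()
            .EnableIpAccessLists()
            .delete(new DeleteAccountIpAccessEnableRequest().setEtag(etag));
    System.out.println(deleted.getEtag());
  }
}
```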
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java
new file mode 100755
index 000000000..7151c3fec
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteAccountIpAccessEnableResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteAccountIpAccessEnableResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAccountIpAccessEnableResponse that = (DeleteAccountIpAccessEnableResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAccountIpAccessEnableResponse.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java
new file mode 100755
index 000000000..19c5d4c5e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsAPI.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Controls the enforcement of IP access lists for accessing the account console, allowing you to
+ * enable or disable restricted access based on IP addresses.
+ */
+@Generated
+public class EnableIpAccessListsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(EnableIpAccessListsAPI.class);
+
+ private final EnableIpAccessListsService impl;
+
+ /** Regular-use constructor */
+ public EnableIpAccessListsAPI(ApiClient apiClient) {
+ impl = new EnableIpAccessListsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public EnableIpAccessListsAPI(EnableIpAccessListsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Delete the account IP access toggle setting.
+ *
+ * <p>Reverts the value of the account IP access toggle setting to default (ON)
+ */
+ public DeleteAccountIpAccessEnableResponse delete(DeleteAccountIpAccessEnableRequest request) {
+ return impl.delete(request);
+ }
+
+ /**
+ * Get the account IP access toggle setting.
+ *
+ * <p>Gets the value of the account IP access toggle setting.
+ */
+ public AccountIpAccessEnable get(GetAccountIpAccessEnableRequest request) {
+ return impl.get(request);
+ }
+
+ public AccountIpAccessEnable update(
+ boolean allowMissing, AccountIpAccessEnable setting, String fieldMask) {
+ return update(
+ new UpdateAccountIpAccessEnableRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update the account IP access toggle setting.
+ *
+ * <p>Updates the value of the account IP access toggle setting.
+ */
+ public AccountIpAccessEnable update(UpdateAccountIpAccessEnableRequest request) {
+ return impl.update(request);
+ }
+
+ public EnableIpAccessListsService impl() {
+ return impl;
+ }
+}
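A sketch of the convenience `update` overload shown above; note the field mask string `acct_ip_acl_enable` is an assumption derived from the JSON property name and is not confirmed by this diff:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.AccountIpAccessEnable;
import com.databricks.sdk.service.settings.BooleanMessage;

public class EnableIpAccessListsUpdateSketch {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    AccountIpAccessEnable updated =
        a.settings()
            .EnableIpAccessLists()
            .update(
                /* allowMissing= */ true,
                new AccountIpAccessEnable()
                    .setAcctIpAclEnable(new BooleanMessage().setValue(false)),
                /* fieldMask= */ "acct_ip_acl_enable"); // assumed mask path
    System.out.println(updated.getEtag());
  }
}
```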
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java
new file mode 100755
index 000000000..000b182be
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsImpl.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of EnableIpAccessLists */
+@Generated
+class EnableIpAccessListsImpl implements EnableIpAccessListsService {
+ private final ApiClient apiClient;
+
+ public EnableIpAccessListsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DeleteAccountIpAccessEnableResponse delete(DeleteAccountIpAccessEnableRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/accounts/%s/settings/types/acct_ip_acl_enable/names/default",
+ apiClient.configuredAccountID());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DeleteAccountIpAccessEnableResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AccountIpAccessEnable get(GetAccountIpAccessEnableRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/accounts/%s/settings/types/acct_ip_acl_enable/names/default",
+ apiClient.configuredAccountID());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, AccountIpAccessEnable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AccountIpAccessEnable update(UpdateAccountIpAccessEnableRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/accounts/%s/settings/types/acct_ip_acl_enable/names/default",
+ apiClient.configuredAccountID());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, AccountIpAccessEnable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsService.java
new file mode 100755
index 000000000..02340930d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableIpAccessListsService.java
@@ -0,0 +1,38 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Controls the enforcement of IP access lists for accessing the account console, allowing you to
+ * enable or disable restricted access based on IP addresses.
+ *
+ * <p>This is the high-level interface that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface EnableIpAccessListsService {
+ /**
+ * Delete the account IP access toggle setting.
+ *
+ * <p>Reverts the value of the account IP access toggle setting to default (ON)
+ */
+ DeleteAccountIpAccessEnableResponse delete(
+ DeleteAccountIpAccessEnableRequest deleteAccountIpAccessEnableRequest);
+
+ /**
+ * Get the account IP access toggle setting.
+ *
+ * <p>Gets the value of the account IP access toggle setting.
+ */
+ AccountIpAccessEnable get(GetAccountIpAccessEnableRequest getAccountIpAccessEnableRequest);
+
+ /**
+ * Update the account IP access toggle setting.
+ *
+ * <p>Updates the value of the account IP access toggle setting.
+ */
+ AccountIpAccessEnable update(
+ UpdateAccountIpAccessEnableRequest updateAccountIpAccessEnableRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java
new file mode 100755
index 000000000..348af6711
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the account IP access toggle setting */
+@Generated
+public class GetAccountIpAccessEnableRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetAccountIpAccessEnableRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAccountIpAccessEnableRequest that = (GetAccountIpAccessEnableRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAccountIpAccessEnableRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
index 23068a3ef..cbd1e3bcd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
@@ -8,5 +8,6 @@
@Generated
public enum TokenType {
ARCLIGHT_AZURE_EXCHANGE_TOKEN,
+ ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY,
AZURE_ACTIVE_DIRECTORY_TOKEN,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java
new file mode 100755
index 000000000..60cb583c2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountIpAccessEnableRequest.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateAccountIpAccessEnableRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private AccountIpAccessEnable setting;
+
+ public UpdateAccountIpAccessEnableRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateAccountIpAccessEnableRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateAccountIpAccessEnableRequest setSetting(AccountIpAccessEnable setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public AccountIpAccessEnable getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAccountIpAccessEnableRequest that = (UpdateAccountIpAccessEnableRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAccountIpAccessEnableRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java
index 9e8a2ff89..e41d32f7f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
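To make the rewritten field-mask contract concrete, a sketch naming an explicit field instead of the `*` wildcard; the mask path shown is illustrative only and not verified against the REST spec:

```java
import com.databricks.sdk.service.settings.UpdateAibiDashboardEmbeddingAccessPolicySettingRequest;

public class FieldMaskSketch {
  public static void main(String[] args) {
    // Name the exact field being changed rather than using the "*" wildcard,
    // per the updated Javadoc. The mask path below is a hypothetical example.
    UpdateAibiDashboardEmbeddingAccessPolicySettingRequest req =
        new UpdateAibiDashboardEmbeddingAccessPolicySettingRequest()
            .setAllowMissing(true)
            .setFieldMask("aibi_dashboard_embedding_access_policy.access_policy_type");
    System.out.println(req.getFieldMask());
  }
}
```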
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
index a3e7de0dd..a0696f850 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java
index 88bbc50f3..4231071d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateAutomaticClusterUpdateSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java
index 864251cf4..6eb4c62b2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateComplianceSecurityProfileSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java
index 3ba22a444..a1243c1cd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateCspEnablementAccountSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java
index 0efc5c7ac..3e26425dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateDefaultNamespaceSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java
index a90d8df94..92ecb6463 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java
@@ -15,9 +15,15 @@ public class UpdateDisableLegacyAccessRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java
index 6c657d6b3..9859a2ade 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java
@@ -15,9 +15,15 @@ public class UpdateDisableLegacyDbfsRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java
index c6c77c614..d54d4f516 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java
@@ -15,9 +15,15 @@ public class UpdateDisableLegacyFeaturesRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java
index da9566101..9acd11789 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateEnhancedSecurityMonitoringSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java
index 3bda7402b..224635191 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateEsmEnablementAccountSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java
index 000fc8d6d..50470709c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdatePersonalComputeSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java
index 09128be99..e6162fd5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java
@@ -15,9 +15,15 @@ public class UpdateRestrictWorkspaceAdminsSettingRequest {
private Boolean allowMissing;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("field_mask")
private String fieldMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java
index 300665744..f534903c8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java
@@ -21,7 +21,10 @@ public class CreateProvider {
@JsonProperty("name")
private String name;
- /** This field is required when the __authentication_type__ is **TOKEN** or not provided. */
+ /**
+ * This field is required when the __authentication_type__ is **TOKEN**,
+ * **OAUTH_CLIENT_CREDENTIALS**, or not provided.
+ */
@JsonProperty("recipient_profile_str")
private String recipientProfileStr;
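A sketch of creating a provider under the broadened contract, assuming the sharing `AuthenticationType` enum gains `OAUTH_CLIENT_CREDENTIALS` elsewhere in this release, as the docstrings imply; the profile JSON is a placeholder:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sharing.AuthenticationType;
import com.databricks.sdk.service.sharing.CreateProvider;
import com.databricks.sdk.service.sharing.ProviderInfo;

public class CreateProviderSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // recipient_profile_str carries the contents of the profile file shared by
    // the recipient; the JSON below is a placeholder, not a real profile.
    ProviderInfo provider =
        w.providers()
            .create(
                new CreateProvider()
                    .setName("my-provider")
                    .setAuthenticationType(AuthenticationType.OAUTH_CLIENT_CREDENTIALS)
                    .setRecipientProfileStr("{\"shareCredentialsVersion\": 1}"));
    System.out.println(provider.getName());
  }
}
```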
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java
index a70b6d81f..b7589d41f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java
@@ -18,8 +18,8 @@ public class CreateRecipient {
private String comment;
/**
- * The global Unity Catalog metastore id provided by the data recipient. This field is required
- * when the __authentication_type__ is **DATABRICKS**. The identifier is of format
+ * The global Unity Catalog metastore id provided by the data recipient. This field is only
+ * present when the __authentication_type__ is **DATABRICKS**. The identifier is of format
* __cloud__:__region__:__metastore-uuid__.
*/
@JsonProperty("data_recipient_global_metastore_id")
@@ -41,12 +41,16 @@ public class CreateRecipient {
@JsonProperty("owner")
private String owner;
- /** Recipient properties as map of string key-value pairs. */
+ /**
+ * Recipient properties as a map of string key-value pairs. When provided in an update request,
+ * the specified properties will override the existing properties. To add and remove properties,
+ * one would need to perform a read-modify-write.
+ */
@JsonProperty("properties_kvpairs")
private SecurablePropertiesKvPairs propertiesKvpairs;
/**
- * The one-time sharing code provided by the data recipient. This field is required when the
+ * The one-time sharing code provided by the data recipient. This field is only present when the
* __authentication_type__ is **DATABRICKS**.
*/
@JsonProperty("sharing_code")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java
index 66e8fae19..4b5999d5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProviderInfo.java
@@ -35,7 +35,7 @@ public class ProviderInfo {
/**
* The global UC metastore id of the data provider. This field is only present when the
* __authentication_type__ is **DATABRICKS**. The identifier is of format
- * <cloud>:<region>:<metastore-uuid>.
+ * __cloud__:__region__:__metastore-uuid__.
*/
@JsonProperty("data_provider_global_metastore_id")
private String dataProviderGlobalMetastoreId;
@@ -55,11 +55,17 @@ public class ProviderInfo {
@JsonProperty("owner")
private String owner;
- /** The recipient profile. This field is only present when the authentication_type is `TOKEN`. */
+ /**
+ * The recipient profile. This field is only present when the authentication_type is `TOKEN` or
+ * `OAUTH_CLIENT_CREDENTIALS`.
+ */
@JsonProperty("recipient_profile")
private RecipientProfile recipientProfile;
- /** This field is only present when the authentication_type is `TOKEN` or not provided. */
+ /**
+ * This field is required when the __authentication_type__ is **TOKEN**,
+ * **OAUTH_CLIENT_CREDENTIALS**, or not provided.
+ */
@JsonProperty("recipient_profile_str")
private String recipientProfileStr;
@@ -74,7 +80,7 @@ public class ProviderInfo {
@JsonProperty("updated_at")
private Long updatedAt;
- /** Username of user who last modified Share. */
+ /** Username of user who last modified Provider. */
@JsonProperty("updated_by")
private String updatedBy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java
index a007014c8..12cba1572 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientInfo.java
@@ -29,8 +29,8 @@ public class RecipientInfo {
private AuthenticationType authenticationType;
/**
- * Cloud vendor of the recipient's Unity Catalog Metstore. This field is only present when the
- * __authentication_type__ is **DATABRICKS**`.
+ * Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the
+ * __authentication_type__ is **DATABRICKS**.
*/
@JsonProperty("cloud")
private String cloud;
@@ -55,13 +55,17 @@ public class RecipientInfo {
@JsonProperty("data_recipient_global_metastore_id")
private String dataRecipientGlobalMetastoreId;
+ /** Expiration timestamp of the token, in epoch milliseconds. */
+ @JsonProperty("expiration_time")
+ private Long expirationTime;
+
/** IP Access List */
@JsonProperty("ip_access_list")
private IpAccessList ipAccessList;
/**
- * Unique identifier of recipient's Unity Catalog metastore. This field is only present when the
- * __authentication_type__ is **DATABRICKS**
+ * Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the
+ * __authentication_type__ is **DATABRICKS**.
*/
@JsonProperty("metastore_id")
private String metastoreId;
@@ -74,12 +78,16 @@ public class RecipientInfo {
@JsonProperty("owner")
private String owner;
- /** Recipient properties as map of string key-value pairs. */
+ /**
+ * Recipient properties as a map of string key-value pairs. When provided in an update request,
+ * the specified properties will override the existing properties. To add and remove properties,
+ * one would need to perform a read-modify-write.
+ */
@JsonProperty("properties_kvpairs")
private SecurablePropertiesKvPairs propertiesKvpairs;
/**
- * Cloud region of the recipient's Unity Catalog Metstore. This field is only present when the
+ * Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the
* __authentication_type__ is **DATABRICKS**.
*/
@JsonProperty("region")
@@ -176,6 +184,15 @@ public String getDataRecipientGlobalMetastoreId() {
return dataRecipientGlobalMetastoreId;
}
+ public RecipientInfo setExpirationTime(Long expirationTime) {
+ this.expirationTime = expirationTime;
+ return this;
+ }
+
+ public Long getExpirationTime() {
+ return expirationTime;
+ }
+
public RecipientInfo setIpAccessList(IpAccessList ipAccessList) {
this.ipAccessList = ipAccessList;
return this;
@@ -279,6 +296,7 @@ public boolean equals(Object o) {
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
&& Objects.equals(dataRecipientGlobalMetastoreId, that.dataRecipientGlobalMetastoreId)
+ && Objects.equals(expirationTime, that.expirationTime)
&& Objects.equals(ipAccessList, that.ipAccessList)
&& Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(name, that.name)
@@ -302,6 +320,7 @@ public int hashCode() {
createdAt,
createdBy,
dataRecipientGlobalMetastoreId,
+ expirationTime,
ipAccessList,
metastoreId,
name,
@@ -325,6 +344,7 @@ public String toString() {
.add("createdAt", createdAt)
.add("createdBy", createdBy)
.add("dataRecipientGlobalMetastoreId", dataRecipientGlobalMetastoreId)
+ .add("expirationTime", expirationTime)
.add("ipAccessList", ipAccessList)
.add("metastoreId", metastoreId)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java
index 03c401739..63f57697a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientTokenInfo.java
@@ -16,7 +16,7 @@ public class RecipientTokenInfo {
@JsonProperty("activation_url")
private String activationUrl;
- /** Time at which this recipient Token was created, in epoch milliseconds. */
+ /** Time at which this recipient token was created, in epoch milliseconds. */
@JsonProperty("created_at")
private Long createdAt;
@@ -32,11 +32,11 @@ public class RecipientTokenInfo {
@JsonProperty("id")
private String id;
- /** Time at which this recipient Token was updated, in epoch milliseconds. */
+ /** Time at which this recipient token was updated, in epoch milliseconds. */
@JsonProperty("updated_at")
private Long updatedAt;
- /** Username of recipient Token updater. */
+ /** Username of recipient token updater. */
@JsonProperty("updated_by")
private String updatedBy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
index b3d3c0453..b6e71c102 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
@@ -47,7 +47,7 @@ public RecipientInfo create(String name, AuthenticationType authenticationType)
* Create a share recipient.
*
* <p>Creates a new recipient with the delta sharing authentication type in the metastore. The
- * caller must be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+ * caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
*/
public RecipientInfo create(CreateRecipient request) {
return impl.create(request);
@@ -135,8 +135,8 @@ public GetRecipientSharePermissionsResponse sharePermissions(SharePermissionsReq
return impl.sharePermissions(request);
}
- public void update(String name) {
- update(new UpdateRecipient().setName(name));
+ public RecipientInfo update(String name) {
+ return update(new UpdateRecipient().setName(name));
}
/**
@@ -146,8 +146,8 @@ public void update(String name) {
* owner of the recipient. If the recipient name will be updated, the user must be both a
* metastore admin and the owner of the recipient.
*/
- public void update(UpdateRecipient request) {
- impl.update(request);
+ public RecipientInfo update(UpdateRecipient request) {
+ return impl.update(request);
}
public RecipientsService impl() {
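Since update() now returns the server's view of the recipient, the read-modify-write flow described for properties_kvpairs can confirm its result in one call. A sketch under the assumption that SecurablePropertiesKvPairs exposes a Map<String, String> via getProperties()/setProperties(); names and values are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sharing.RecipientInfo;
import com.databricks.sdk.service.sharing.SecurablePropertiesKvPairs;
import com.databricks.sdk.service.sharing.UpdateRecipient;
import java.util.HashMap;
import java.util.Map;

public class RecipientPropertiesReadModifyWrite {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Read: fetch the current properties ("analytics-partner" is a placeholder).
    RecipientInfo current = w.recipients().get("analytics-partner");
    Map<String, String> props = new HashMap<>();
    if (current.getPropertiesKvpairs() != null
        && current.getPropertiesKvpairs().getProperties() != null) {
      props.putAll(current.getPropertiesKvpairs().getProperties());
    }

    // Modify: add one entry while keeping the rest, since the update
    // replaces the whole map.
    props.put("cost_center", "1234");

    // Write: update() now returns the updated RecipientInfo instead of void.
    RecipientInfo updated =
        w.recipients()
            .update(
                new UpdateRecipient()
                    .setName("analytics-partner")
                    .setPropertiesKvpairs(
                        new SecurablePropertiesKvPairs().setProperties(props)));
    System.out.println("Recipient properties now: " + updated.getPropertiesKvpairs());
  }
}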
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java
index 8b70cc8a6..45f7c5ada 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java
@@ -99,14 +99,14 @@ public GetRecipientSharePermissionsResponse sharePermissions(SharePermissionsReq
}
@Override
- public void update(UpdateRecipient request) {
+ public RecipientInfo update(UpdateRecipient request) {
String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName());
try {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateResponse.class);
+ return apiClient.execute(req, RecipientInfo.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
index 3f221080e..8265f978e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
@@ -29,7 +29,7 @@ public interface RecipientsService {
* Create a share recipient.
*
* <p>Creates a new recipient with the delta sharing authentication type in the metastore. The
- * caller must be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+ * caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
*/
RecipientInfo create(CreateRecipient createRecipient);
@@ -84,5 +84,5 @@ GetRecipientSharePermissionsResponse sharePermissions(
* owner of the recipient. If the recipient name will be updated, the user must be both a
* metastore admin and the owner of the recipient.
*/
- void update(UpdateRecipient updateRecipient);
+ RecipientInfo update(UpdateRecipient updateRecipient);
}
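The signature change ripples into any custom RecipientsService implementation, which must now return a RecipientInfo from update. A hedged test sketch using Mockito (not part of this diff), assuming RecipientsAPI exposes the same mock constructor that RedashConfigAPI does below:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.databricks.sdk.service.sharing.RecipientInfo;
import com.databricks.sdk.service.sharing.RecipientsAPI;
import com.databricks.sdk.service.sharing.RecipientsService;
import com.databricks.sdk.service.sharing.UpdateRecipient;

public class RecipientsUpdateStub {
  public static void main(String[] args) {
    RecipientsService stub = mock(RecipientsService.class);
    // Stubs must now return a RecipientInfo instead of completing a void call.
    when(stub.update(any(UpdateRecipient.class)))
        .thenReturn(new RecipientInfo().setName("my-recipient"));

    RecipientsAPI recipients = new RecipientsAPI(stub);
    RecipientInfo updated = recipients.update("my-recipient");
    System.out.println("Stubbed update returned: " + updated.getName());
  }
}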
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java
index dda95d2c7..07b63c93c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RotateRecipientToken.java
@@ -18,7 +18,7 @@ public class RotateRecipientToken {
@JsonProperty("existing_token_expire_in_seconds")
private Long existingTokenExpireInSeconds;
- /** The name of the recipient. */
+ /** The name of the Recipient. */
@JsonIgnore private String name;
public RotateRecipientToken setExistingTokenExpireInSeconds(Long existingTokenExpireInSeconds) {
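existing_token_expire_in_seconds controls how long the old token stays valid during rotation. A sketch, assuming RecipientInfo's standard getTokens() accessor; the recipient name is a placeholder:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sharing.RecipientInfo;
import com.databricks.sdk.service.sharing.RotateRecipientToken;

public class RotateRecipientTokenExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Keep the old token valid for one hour as a migration window;
    // 0 would expire it immediately.
    RecipientInfo rotated =
        w.recipients()
            .rotateToken(
                new RotateRecipientToken()
                    .setName("analytics-partner") // placeholder recipient name
                    .setExistingTokenExpireInSeconds(3600L));
    System.out.println("Tokens after rotation: " + rotated.getTokens());
  }
}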
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java
index b980bc585..c6d0621dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java
@@ -25,7 +25,10 @@ public class UpdateProvider {
@JsonProperty("owner")
private String owner;
- /** This field is required when the __authentication_type__ is **TOKEN** or not provided. */
+ /**
+ * This field is required when the __authentication_type__ is **TOKEN**,
+ * **OAUTH_CLIENT_CREDENTIALS** or not provided.
+ */
@JsonProperty("recipient_profile_str")
private String recipientProfileStr;
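With recipient_profile_str now also required for **OAUTH_CLIENT_CREDENTIALS**, an update might look like the sketch below; the profile JSON shape and provider name are illustrative, not prescribed by this diff:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sharing.UpdateProvider;

public class UpdateProviderProfile {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Illustrative Delta Sharing profile JSON; the real schema comes from the
    // profile file issued to you, not from this diff.
    String profileJson =
        "{\"shareCredentialsVersion\":1,"
            + "\"endpoint\":\"https://sharing.example.com/delta-sharing\","
            + "\"bearerToken\":\"<redacted>\"}";
    w.providers()
        .update(new UpdateProvider().setName("my-provider").setRecipientProfileStr(profileJson));
  }
}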
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java
index 3d6bd8237..3b896d11f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java
@@ -25,7 +25,7 @@ public class UpdateRecipient {
/** Name of the recipient. */
@JsonIgnore private String name;
- /** New name for the recipient. */
+ /** New name for the recipient. . */
@JsonProperty("new_name")
private String newName;
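For reference, a rename via new_name, which per the update docs requires being both a metastore admin and the recipient's owner; names are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sharing.RecipientInfo;
import com.databricks.sdk.service.sharing.UpdateRecipient;

public class RenameRecipientExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    RecipientInfo renamed =
        w.recipients()
            .update(new UpdateRecipient().setName("old-name").setNewName("new-name"));
    System.out.println("Recipient renamed to " + renamed.getName());
  }
}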
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java
new file mode 100755
index 000000000..e84cfa55d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java
@@ -0,0 +1,189 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ClientConfig {
+ /** */
+ @JsonProperty("allow_custom_js_visualizations")
+ private Boolean allowCustomJsVisualizations;
+
+ /** */
+ @JsonProperty("allow_downloads")
+ private Boolean allowDownloads;
+
+ /** */
+ @JsonProperty("allow_external_shares")
+ private Boolean allowExternalShares;
+
+ /** */
+ @JsonProperty("allow_subscriptions")
+ private Boolean allowSubscriptions;
+
+ /** */
+ @JsonProperty("date_format")
+ private String dateFormat;
+
+ /** */
+ @JsonProperty("date_time_format")
+ private String dateTimeFormat;
+
+ /** */
+ @JsonProperty("disable_publish")
+ private Boolean disablePublish;
+
+ /** */
+ @JsonProperty("enable_legacy_autodetect_types")
+ private Boolean enableLegacyAutodetectTypes;
+
+ /** */
+ @JsonProperty("feature_show_permissions_control")
+ private Boolean featureShowPermissionsControl;
+
+ /** */
+ @JsonProperty("hide_plotly_mode_bar")
+ private Boolean hidePlotlyModeBar;
+
+ public ClientConfig setAllowCustomJsVisualizations(Boolean allowCustomJsVisualizations) {
+ this.allowCustomJsVisualizations = allowCustomJsVisualizations;
+ return this;
+ }
+
+ public Boolean getAllowCustomJsVisualizations() {
+ return allowCustomJsVisualizations;
+ }
+
+ public ClientConfig setAllowDownloads(Boolean allowDownloads) {
+ this.allowDownloads = allowDownloads;
+ return this;
+ }
+
+ public Boolean getAllowDownloads() {
+ return allowDownloads;
+ }
+
+ public ClientConfig setAllowExternalShares(Boolean allowExternalShares) {
+ this.allowExternalShares = allowExternalShares;
+ return this;
+ }
+
+ public Boolean getAllowExternalShares() {
+ return allowExternalShares;
+ }
+
+ public ClientConfig setAllowSubscriptions(Boolean allowSubscriptions) {
+ this.allowSubscriptions = allowSubscriptions;
+ return this;
+ }
+
+ public Boolean getAllowSubscriptions() {
+ return allowSubscriptions;
+ }
+
+ public ClientConfig setDateFormat(String dateFormat) {
+ this.dateFormat = dateFormat;
+ return this;
+ }
+
+ public String getDateFormat() {
+ return dateFormat;
+ }
+
+ public ClientConfig setDateTimeFormat(String dateTimeFormat) {
+ this.dateTimeFormat = dateTimeFormat;
+ return this;
+ }
+
+ public String getDateTimeFormat() {
+ return dateTimeFormat;
+ }
+
+ public ClientConfig setDisablePublish(Boolean disablePublish) {
+ this.disablePublish = disablePublish;
+ return this;
+ }
+
+ public Boolean getDisablePublish() {
+ return disablePublish;
+ }
+
+ public ClientConfig setEnableLegacyAutodetectTypes(Boolean enableLegacyAutodetectTypes) {
+ this.enableLegacyAutodetectTypes = enableLegacyAutodetectTypes;
+ return this;
+ }
+
+ public Boolean getEnableLegacyAutodetectTypes() {
+ return enableLegacyAutodetectTypes;
+ }
+
+ public ClientConfig setFeatureShowPermissionsControl(Boolean featureShowPermissionsControl) {
+ this.featureShowPermissionsControl = featureShowPermissionsControl;
+ return this;
+ }
+
+ public Boolean getFeatureShowPermissionsControl() {
+ return featureShowPermissionsControl;
+ }
+
+ public ClientConfig setHidePlotlyModeBar(Boolean hidePlotlyModeBar) {
+ this.hidePlotlyModeBar = hidePlotlyModeBar;
+ return this;
+ }
+
+ public Boolean getHidePlotlyModeBar() {
+ return hidePlotlyModeBar;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ClientConfig that = (ClientConfig) o;
+ return Objects.equals(allowCustomJsVisualizations, that.allowCustomJsVisualizations)
+ && Objects.equals(allowDownloads, that.allowDownloads)
+ && Objects.equals(allowExternalShares, that.allowExternalShares)
+ && Objects.equals(allowSubscriptions, that.allowSubscriptions)
+ && Objects.equals(dateFormat, that.dateFormat)
+ && Objects.equals(dateTimeFormat, that.dateTimeFormat)
+ && Objects.equals(disablePublish, that.disablePublish)
+ && Objects.equals(enableLegacyAutodetectTypes, that.enableLegacyAutodetectTypes)
+ && Objects.equals(featureShowPermissionsControl, that.featureShowPermissionsControl)
+ && Objects.equals(hidePlotlyModeBar, that.hidePlotlyModeBar);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ allowCustomJsVisualizations,
+ allowDownloads,
+ allowExternalShares,
+ allowSubscriptions,
+ dateFormat,
+ dateTimeFormat,
+ disablePublish,
+ enableLegacyAutodetectTypes,
+ featureShowPermissionsControl,
+ hidePlotlyModeBar);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ClientConfig.class)
+ .add("allowCustomJsVisualizations", allowCustomJsVisualizations)
+ .add("allowDownloads", allowDownloads)
+ .add("allowExternalShares", allowExternalShares)
+ .add("allowSubscriptions", allowSubscriptions)
+ .add("dateFormat", dateFormat)
+ .add("dateTimeFormat", dateTimeFormat)
+ .add("disablePublish", disablePublish)
+ .add("enableLegacyAutodetectTypes", enableLegacyAutodetectTypes)
+ .add("featureShowPermissionsControl", featureShowPermissionsControl)
+ .add("hidePlotlyModeBar", hidePlotlyModeBar)
+ .toString();
+ }
+}
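ClientConfig follows the generator's fluent-setter pattern, so instances can be built in one chained expression; the values below are placeholders:

import com.databricks.sdk.service.sql.ClientConfig;

public class ClientConfigExample {
  public static void main(String[] args) {
    ClientConfig config =
        new ClientConfig()
            .setAllowDownloads(true)
            .setAllowExternalShares(false)
            .setDateFormat("YYYY-MM-DD") // placeholder format string
            .setHidePlotlyModeBar(true);
    // equals/hashCode/toString are generated field by field.
    System.out.println(config);
  }
}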
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java
new file mode 100755
index 000000000..7a27bd439
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigAPI.java
@@ -0,0 +1,34 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Redash V2 service for workspace configurations (internal) */
+@Generated
+public class RedashConfigAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(RedashConfigAPI.class);
+
+ private final RedashConfigService impl;
+
+ /** Regular-use constructor */
+ public RedashConfigAPI(ApiClient apiClient) {
+ impl = new RedashConfigImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public RedashConfigAPI(RedashConfigService mock) {
+ impl = mock;
+ }
+
+ /** Read workspace configuration for Redash-v2. */
+ public ClientConfig getConfig() {
+ return impl.getConfig();
+ }
+
+ public RedashConfigService impl() {
+ return impl;
+ }
+}
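This diff does not show whether WorkspaceClient wires up an accessor for the new service, so a hedged usage sketch constructs the API directly over the client's ApiClient (assuming the public apiClient() getter):

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.ClientConfig;
import com.databricks.sdk.service.sql.RedashConfigAPI;

public class RedashConfigExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Constructed directly, since this diff shows no WorkspaceClient accessor
    // for the new service.
    RedashConfigAPI redashConfig = new RedashConfigAPI(w.apiClient());
    ClientConfig config = redashConfig.getConfig();
    System.out.println("Downloads allowed: " + config.getAllowDownloads());
  }
}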
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigImpl.java
new file mode 100755
index 000000000..b38648e7a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigImpl.java
@@ -0,0 +1,30 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of RedashConfig */
+@Generated
+class RedashConfigImpl implements RedashConfigService {
+ private final ApiClient apiClient;
+
+ public RedashConfigImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public ClientConfig getConfig() {
+ String path = "/api/2.0/redash-v2/config";
+ try {
+ Request req = new Request("GET", path);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ClientConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigService.java
new file mode 100755
index 000000000..aa596cfe8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigService.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Redash V2 service for workspace configurations (internal)
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface RedashConfigService {
+ /** Read workspace configuration for Redash-v2. */
+ ClientConfig getConfig();
+}
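Because the interface has a single abstract method, a lambda suffices as a test stub when combined with the mock constructor shown in RedashConfigAPI above:

import com.databricks.sdk.service.sql.ClientConfig;
import com.databricks.sdk.service.sql.RedashConfigAPI;
import com.databricks.sdk.service.sql.RedashConfigService;

public class RedashConfigStub {
  public static void main(String[] args) {
    // Single abstract method, so a lambda works as a stub.
    RedashConfigService stub = () -> new ClientConfig().setAllowDownloads(false);
    RedashConfigAPI api = new RedashConfigAPI(stub);
    System.out.println(api.getConfig());
  }
}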
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java
index 9b4c1187e..3725daf02 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java
@@ -18,9 +18,15 @@ public class UpdateAlertRequest {
@JsonIgnore private String id;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("update_mask")
private String updateMask;
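The same mask semantics apply to UpdateQueryRequest and UpdateVisualizationRequest below. A sketch of constructing a masked update; the field names in the mask are illustrative, not an exhaustive list of alert fields:

import com.databricks.sdk.service.sql.UpdateAlertRequest;

public class FieldMaskExample {
  public static void main(String[] args) {
    // Comma-separated with no spaces; dotted paths address sub-fields.
    UpdateAlertRequest request =
        new UpdateAlertRequest()
            .setId("placeholder-alert-id")
            .setUpdateMask("display_name,condition.op"); // illustrative field names
    // A mask of "*" would replace the whole resource, which the docs advise against.
    System.out.println(request);
  }
}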
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java
index 85c111d05..3edc04649 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java
@@ -18,9 +18,15 @@ public class UpdateQueryRequest {
private UpdateQueryRequestQuery query;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("update_mask")
private String updateMask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java
index df3de93a8..1cf729a01 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java
@@ -14,9 +14,15 @@ public class UpdateVisualizationRequest {
@JsonIgnore private String id;
/**
- * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
- * of the setting payload will be updated. The field mask needs to be supplied as single string.
- * To specify multiple fields in the field mask, use comma as the separator (no space).
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
*/
@JsonProperty("update_mask")
private String updateMask;
diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml
index 95c8c853f..3673a2a3e 100644
--- a/examples/docs/pom.xml
+++ b/examples/docs/pom.xml
@@ -24,7 +24,7 @@
<groupId>com.databricks</groupId>
<artifactId>databricks-sdk-java</artifactId>
- <version>0.39.0</version>
+ <version>0.40.0</version>
diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml
index e208276ea..cd6bf89bb 100644
--- a/examples/spring-boot-oauth-u2m-demo/pom.xml
+++ b/examples/spring-boot-oauth-u2m-demo/pom.xml
@@ -37,7 +37,7 @@
<groupId>com.databricks</groupId>
<artifactId>databricks-sdk-java</artifactId>
- <version>0.39.0</version>
+ <version>0.40.0</version>
<groupId>com.fasterxml.jackson.datatype</groupId>
diff --git a/pom.xml b/pom.xml
index 48aec6126..1e92a6c4c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.databricks</groupId>
<artifactId>databricks-sdk-parent</artifactId>
- <version>0.39.0</version>
+ <version>0.40.0</version>
<packaging>pom</packaging>
<name>Databricks SDK for Java</name>
<description>The Databricks SDK for Java includes functionality to accelerate development with Java for
diff --git a/shaded/pom.xml b/shaded/pom.xml
index 937162f84..68c3a39ae 100644
--- a/shaded/pom.xml
+++ b/shaded/pom.xml
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
- <version>0.39.0</version>
+ <version>0.40.0</version>
<groupId>com.databricks</groupId>