2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
-59c4c0f3d5f0ef00cd5350b5674e941a7606d91a
+bce9714cf1393c458531de222ef43e1e3afb57b0
6,298 changes: 3,155 additions & 3,143 deletions .gitattributes

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions NEXT_CHANGELOG.md
@@ -13,3 +13,14 @@
### Internal Changes

### API Changes
* Add `batchCreateMaterializedFeatures()` method for `workspaceClient.featureEngineering()` service (a usage sketch follows this list).
* Add `retrieveUserVisibleMetrics()` method for `workspaceClient.vectorSearchEndpoints()` service.
* Add `purpose` field for `com.databricks.sdk.service.dashboards.TextAttachment`.
* Add `lineageContext` field for `com.databricks.sdk.service.ml.Feature`.
* Add `ingestFromUcForeignCatalog` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
* Add `AUTOSCALE_V2` enum value for `com.databricks.sdk.service.compute.EventDetailsCause`.
* Add `UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION` enum value for `com.databricks.sdk.service.dashboards.MessageErrorType`.
* Add `RED_STATE` and `YELLOW_STATE` enum values for `com.databricks.sdk.service.vectorsearch.EndpointStatusState`.
* [Breaking] Change `tableNames` field for `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` to be required.
* [Breaking] Change `onlineStoreConfig` field for `com.databricks.sdk.service.ml.MaterializedFeature` to type `com.databricks.sdk.service.ml.OnlineStoreConfig` class.
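
As a quick orientation for the first entry above, here is a minimal sketch of the new batch call. Only `BatchCreateMaterializedFeaturesRequest.setRequests()` is confirmed by this diff; the `WorkspaceClient` wiring follows the usual generated pattern, and the inner fields of `CreateMaterializedFeatureRequest` are left unset because they do not appear here.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.BatchCreateMaterializedFeaturesRequest;
import com.databricks.sdk.service.ml.CreateMaterializedFeatureRequest;
import java.util.Arrays;

public class BatchCreateMaterializedFeaturesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Each element mirrors what a single create call would send; the batch
    // method wraps them all in one request object.
    CreateMaterializedFeatureRequest first = new CreateMaterializedFeatureRequest();
    CreateMaterializedFeatureRequest second = new CreateMaterializedFeatureRequest();

    w.featureEngineering()
        .batchCreateMaterializedFeatures(
            new BatchCreateMaterializedFeaturesRequest()
                .setRequests(Arrays.asList(first, second)));
  }
}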

Some generated files are not rendered by default.

@@ -79,6 +79,10 @@ public CatalogInfo get(GetCatalogRequest request) {
* reached.
*/
public Iterable<CatalogInfo> list(ListCatalogsRequest request) {

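// Default maxResults to 0 when the caller leaves it unset; the assumption
// (not stated in this diff) is that the service treats 0 as "use the
// server-side default page size", so the paginator still sees every page.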
if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -74,6 +74,10 @@ public ConnectionInfo get(GetConnectionRequest request) {
* reached.
*/
public Iterable<ConnectionInfo> list(ListConnectionsRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -83,6 +83,10 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
* reached.
*/
public Iterable<ExternalLocationInfo> list(ListExternalLocationsRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -96,6 +96,10 @@ public Iterable<FunctionInfo> list(String catalogName, String schemaName) {
* reached.
*/
public Iterable<FunctionInfo> list(ListFunctionsRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -95,6 +95,10 @@ public MetastoreInfo get(GetMetastoreRequest request) {
* reached.
*/
public Iterable<MetastoreInfo> list(ListMetastoresRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -80,6 +80,10 @@ public Iterable<SchemaInfo> list(String catalogName) {
* reached.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -4,7 +4,7 @@

import com.databricks.sdk.support.Generated;

-/** Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271 */
+/** Latest kind: CONNECTION_POSTGRESQL_AWS_SERVICE_CRED = 271; Next id:272 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,

@@ -85,6 +85,10 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
* reached.
*/
public Iterable<StorageCredentialInfo> list(ListStorageCredentialsRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -65,6 +65,10 @@ public Iterable<SystemSchemaInfo> list(String metastoreId) {
* reached.
*/
public Iterable<SystemSchemaInfo> list(ListSystemSchemasRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -126,6 +126,10 @@ public Iterable<TableInfo> list(String catalogName, String schemaName) {
* reached.
*/
public Iterable<TableInfo> list(ListTablesRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::list,

@@ -72,6 +72,10 @@ public Iterable<WorkspaceBinding> getBindings(String securableType, String securableName) {
* reached.
*/
public Iterable<WorkspaceBinding> getBindings(GetBindingsRequest request) {

if (request.getMaxResults() == null) {
request.setMaxResults(0L);
}
return new Paginator<>(
request,
impl::getBindings,

@@ -9,6 +9,7 @@
public enum EventDetailsCause {
AUTORECOVERY,
AUTOSCALE,
AUTOSCALE_V2,
REPLACE_BAD_NODES,
USER_REQUEST,
}

@@ -64,6 +64,7 @@ public enum MessageErrorType {
TOO_MANY_TABLES_EXCEPTION,
UNEXPECTED_REPLY_PROCESS_EXCEPTION,
UNKNOWN_AI_MODEL,
UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION,
WAREHOUSE_ACCESS_MISSING_EXCEPTION,
WAREHOUSE_NOT_FOUND_EXCEPTION,
}

@@ -17,6 +17,10 @@ public class TextAttachment {
@JsonProperty("id")
private String id;

/** Purpose/intent of this text attachment */
@JsonProperty("purpose")
private TextAttachmentPurpose purpose;

public TextAttachment setContent(String content) {
this.content = content;
return this;
@@ -35,21 +39,36 @@ public String getId() {
return id;
}

public TextAttachment setPurpose(TextAttachmentPurpose purpose) {
this.purpose = purpose;
return this;
}

public TextAttachmentPurpose getPurpose() {
return purpose;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TextAttachment that = (TextAttachment) o;
-return Objects.equals(content, that.content) && Objects.equals(id, that.id);
+return Objects.equals(content, that.content)
+    && Objects.equals(id, that.id)
+    && Objects.equals(purpose, that.purpose);
}

@Override
public int hashCode() {
-return Objects.hash(content, id);
+return Objects.hash(content, id, purpose);
}

@Override
public String toString() {
-return new ToStringer(TextAttachment.class).add("content", content).add("id", id).toString();
+return new ToStringer(TextAttachment.class)
+    .add("content", content)
+    .add("id", id)
+    .add("purpose", purpose)
+    .toString();
}
}
@@ -0,0 +1,11 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.dashboards;

import com.databricks.sdk.support.Generated;

/** Purpose/intent of a text attachment */
@Generated
public enum TextAttachmentPurpose {
FOLLOW_UP_QUESTION,
}
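
A short sketch of the new field in use; everything here comes from the setters shown above except `setId()`, which is assumed to exist alongside the generated `getId()`.

import com.databricks.sdk.service.dashboards.TextAttachment;
import com.databricks.sdk.service.dashboards.TextAttachmentPurpose;

public class TextAttachmentPurposeExample {
  public static void main(String[] args) {
    // Mark an attachment as a suggested follow-up question rather than an answer.
    TextAttachment attachment =
        new TextAttachment()
            .setId("attachment-1") // hypothetical id, for illustration only
            .setContent("Which region had the highest revenue last quarter?")
            .setPurpose(TextAttachmentPurpose.FOLLOW_UP_QUESTION);
    System.out.println(attachment);
  }
}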

@@ -58,8 +58,9 @@ public AccountGroup get(GetAccountGroupRequest request) {

/**
* Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this
- * endpoint will not return members. Instead, members should be retrieved by iterating through
- * `Get group details`.
+ * endpoint will no longer return members. Instead, members should be retrieved by iterating
+ * through `Get group details`. Existing accounts that rely on this attribute will not be impacted
+ * and will continue receiving member data as before.
*/
public Iterable<AccountGroup> list(ListAccountGroupsRequest request) {
request.setStartIndex(1L);
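
The note above suggests iterating `Get group details` to recover members. A minimal sketch of that pattern, assuming the generated `AccountClient`, `GetAccountGroupRequest.setId()`, and the `AccountGroup` member/display-name accessors, none of which appear in this diff:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.iam.AccountGroup;
import com.databricks.sdk.service.iam.GetAccountGroupRequest;
import com.databricks.sdk.service.iam.ListAccountGroupsRequest;

public class GroupMembersExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();

    // list() no longer carries members, so fetch each group individually
    // and read members from the detailed response.
    for (AccountGroup group : a.accountGroupsV2().list(new ListAccountGroupsRequest())) {
      AccountGroup detailed =
          a.accountGroupsV2().get(new GetAccountGroupRequest().setId(group.getId()));
      System.out.println(detailed.getDisplayName() + ": " + detailed.getMembers());
    }
  }
}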
@@ -30,8 +30,9 @@ public interface AccountGroupsV2Service {

/**
* Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this
- * endpoint will not return members. Instead, members should be retrieved by iterating through
- * `Get group details`.
+ * endpoint will no longer return members. Instead, members should be retrieved by iterating
+ * through `Get group details`. Existing accounts that rely on this attribute will not be impacted
+ * and will continue receiving member data as before.
*/
ListAccountGroupsResponse list(ListAccountGroupsRequest listAccountGroupsRequest);


@@ -58,10 +58,10 @@ public class CreateJob {

/**
* A list of task execution environment specifications that can be referenced by serverless tasks
- * of this job. An environment is required to be present for serverless tasks. For serverless
- * notebook tasks, the environment is accessible in the notebook environment panel. For other
- * serverless tasks, the task environment is required to be specified using environment_key in the
- * task settings.
+ * of this job. For serverless notebook tasks, if the environment_key is not specified, the
+ * notebook environment will be used if present. If a jobs environment is specified, it will
+ * override the notebook environment. For other serverless tasks, the task environment is required
+ * to be specified using environment_key in the task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;
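
To make the new precedence concrete, a minimal sketch of a job-level environment referenced by a serverless task via environment_key; the `JobEnvironment`, `Task`, and `Environment` builder calls are assumptions from the existing jobs and compute models, not part of this diff:

import com.databricks.sdk.service.compute.Environment;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.JobEnvironment;
import com.databricks.sdk.service.jobs.Task;
import java.util.Arrays;

public class ServerlessEnvironmentExample {
  public static void main(String[] args) {
    // A job-level environment. Per the updated doc, a notebook task that
    // references it via environment_key would use it in place of the
    // notebook's own environment.
    JobEnvironment env =
        new JobEnvironment()
            .setEnvironmentKey("default")
            .setSpec(new Environment().setDependencies(Arrays.asList("pandas==2.2.0")));

    CreateJob job =
        new CreateJob()
            .setName("serverless-environment-example")
            .setEnvironments(Arrays.asList(env))
            .setTasks(
                Arrays.asList(
                    new Task().setTaskKey("main").setEnvironmentKey("default")));
  }
}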

@@ -54,10 +54,10 @@ public class JobSettings {

/**
* A list of task execution environment specifications that can be referenced by serverless tasks
- * of this job. An environment is required to be present for serverless tasks. For serverless
- * notebook tasks, the environment is accessible in the notebook environment panel. For other
- * serverless tasks, the task environment is required to be specified using environment_key in the
- * task settings.
+ * of this job. For serverless notebook tasks, if the environment_key is not specified, the
+ * notebook environment will be used if present. If a jobs environment is specified, it will
+ * override the notebook environment. For other serverless tasks, the task environment is required
+ * to be specified using environment_key in the task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;

@@ -21,7 +21,7 @@
* multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster
* management, monitoring, and error reporting for all of your jobs. You can run your jobs
* immediately or periodically through an easy-to-use scheduling system. You can implement job tasks
- * using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java
+ * using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java
* applications.
*
* <p>You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to

@@ -11,7 +11,7 @@
* multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster
* management, monitoring, and error reporting for all of your jobs. You can run your jobs
* immediately or periodically through an easy-to-use scheduling system. You can implement job tasks
- * using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java
+ * using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java
* applications.
*
* <p>You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to
@@ -0,0 +1,46 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.ml;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

@Generated
public class BatchCreateMaterializedFeaturesRequest {
/** The requests to create materialized features. */
@JsonProperty("requests")
private Collection<CreateMaterializedFeatureRequest> requests;

public BatchCreateMaterializedFeaturesRequest setRequests(
Collection<CreateMaterializedFeatureRequest> requests) {
this.requests = requests;
return this;
}

public Collection<CreateMaterializedFeatureRequest> getRequests() {
return requests;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BatchCreateMaterializedFeaturesRequest that = (BatchCreateMaterializedFeaturesRequest) o;
return Objects.equals(requests, that.requests);
}

@Override
public int hashCode() {
return Objects.hash(requests);
}

@Override
public String toString() {
return new ToStringer(BatchCreateMaterializedFeaturesRequest.class)
.add("requests", requests)
.toString();
}
}