Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
9b38571bfe7bf0bc595480f28eb93a8db3116985
8921a828d1741af0952eb5c4f0292c194c0d5f38
4 changes: 3 additions & 1 deletion .gitattributes
Original file line number Diff line number Diff line change
Expand Up @@ -2086,14 +2086,16 @@
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateRoleRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteBranchRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteEndpointRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteProjectRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java linguist-generated=true
Expand Down
13 changes: 13 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,16 @@
* Add `TABLE_DELTASHARING_OPEN_DIR_BASED` enum value for `com.databricks.sdk.service.catalog.SecurableKind`.
* Add `CREATING` and `CREATE_FAILED` enum values for `com.databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
* [Breaking] Remove `accessModes` and `storageLocation` fields for `com.databricks.sdk.service.sharing.Table`.
* Add `errorMessage` field for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule`.
* Add `errorMessage` field for `com.databricks.sdk.service.settings.NccPrivateEndpointRule`.
* Add `errorMessage` field for `com.databricks.sdk.service.settings.UpdatePrivateEndpointRule`.
* Add `RATE_LIMITED` enum value for `com.databricks.sdk.service.compute.TerminationReasonCode`.
* Add `RATE_LIMITED` enum value for `com.databricks.sdk.service.sql.TerminationReasonCode`.
* [Breaking] Add long-running operation configuration for `workspaceClient.postgres().deleteBranch()` method.
* [Breaking] Add long-running operation configuration for `workspaceClient.postgres().deleteEndpoint()` method.
* [Breaking] Add long-running operation configuration for `workspaceClient.postgres().deleteProject()` method.
* [Breaking] Change `deleteBranch()`, `deleteEndpoint()` and `deleteProject()` methods for `workspaceClient.postgres()` service to return `com.databricks.sdk.service.postgres.Operation` class.
* [Breaking] Remove `pgbouncerSettings` field for `com.databricks.sdk.service.postgres.EndpointSettings`.
* [Breaking] Remove `poolerMode` field for `com.databricks.sdk.service.postgres.EndpointSpec`.
* [Breaking] Remove `poolerMode` field for `com.databricks.sdk.service.postgres.EndpointStatus`.
* [Breaking] Remove `pgbouncerSettings` field for `com.databricks.sdk.service.postgres.ProjectDefaultEndpointSettings`.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import org.slf4j.LoggerFactory;

/**
* Apps run directly on a customers Databricks instance, integrate with their data, use and extend
* Apps run directly on a customer's Databricks instance, integrate with their data, use and extend
* Databricks services, and enable users to interact through single sign-on.
*/
@Generated
Expand Down
2 changes: 1 addition & 1 deletion databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;

/**
* Apps run directly on a customers Databricks instance, integrate with their data, use and extend
* Apps run directly on a customer's Databricks instance, integrate with their data, use and extend
* Databricks services, and enable users to interact through single sign-on.
*
* <p>This is the high-level interface, that contains generated methods.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ public enum TerminationReasonCode {
NPIP_TUNNEL_TOKEN_FAILURE,
POD_ASSIGNMENT_FAILURE,
POD_SCHEDULING_FAILURE,
RATE_LIMITED,
REQUEST_REJECTED,
REQUEST_THROTTLED,
RESOURCE_USAGE_BLOCKED,
Expand Down
44 changes: 36 additions & 8 deletions databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -36,41 +36,69 @@ public class DatabaseInstance {
@JsonProperty("custom_tags")
private Collection<CustomTag> customTags;

/** Deprecated. The sku of the instance; this field will always match the value of capacity. */
/**
* Deprecated. The sku of the instance; this field will always match the value of capacity. This
* is an output only field that contains the value computed from the input field combined with
* server side defaults. Use the field without the effective_ prefix to set the value.
*/
@JsonProperty("effective_capacity")
private String effectiveCapacity;

/** The recorded custom tags associated with the instance. */
/**
* The recorded custom tags associated with the instance. This is an output only field that
* contains the value computed from the input field combined with server side defaults. Use the
* field without the effective_ prefix to set the value.
*/
@JsonProperty("effective_custom_tags")
private Collection<CustomTag> effectiveCustomTags;

/** Whether the instance has PG native password login enabled. */
/**
* Whether the instance has PG native password login enabled. This is an output only field that
* contains the value computed from the input field combined with server side defaults. Use the
* field without the effective_ prefix to set the value.
*/
@JsonProperty("effective_enable_pg_native_login")
private Boolean effectiveEnablePgNativeLogin;

/** Whether secondaries serving read-only traffic are enabled. Defaults to false. */
/**
* Whether secondaries serving read-only traffic are enabled. Defaults to false. This is an output
* only field that contains the value computed from the input field combined with server side
* defaults. Use the field without the effective_ prefix to set the value.
*/
@JsonProperty("effective_enable_readable_secondaries")
private Boolean effectiveEnableReadableSecondaries;

/**
* The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults
* to 1 primary and 0 secondaries.
* to 1 primary and 0 secondaries. This is an output only field that contains the value computed
* from the input field combined with server side defaults. Use the field without the effective_
* prefix to set the value.
*/
@JsonProperty("effective_node_count")
private Long effectiveNodeCount;

/**
* The retention window for the instance. This is the time window in days for which the historical
* data is retained.
* data is retained. This is an output only field that contains the value computed from the input
* field combined with server side defaults. Use the field without the effective_ prefix to set
* the value.
*/
@JsonProperty("effective_retention_window_in_days")
private Long effectiveRetentionWindowInDays;

/** Whether the instance is stopped. */
/**
* Whether the instance is stopped. This is an output only field that contains the value computed
* from the input field combined with server side defaults. Use the field without the effective_
* prefix to set the value.
*/
@JsonProperty("effective_stopped")
private Boolean effectiveStopped;

/** The policy that is applied to the instance. */
/**
* The policy that is applied to the instance. This is an output only field that contains the
* value computed from the input field combined with server side defaults. Use the field without
* the effective_ prefix to set the value.
*/
@JsonProperty("effective_usage_policy_id")
private String effectiveUsagePolicyId;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,9 @@ public class DatabaseInstanceRef {
/**
* For a parent ref instance, this is the LSN on the parent instance from which the instance was
* created. For a child ref instance, this is the LSN on the instance from which the child
* instance was created.
* instance was created. This is an output only field that contains the value computed from the
* input field combined with server side defaults. Use the field without the effective_ prefix to
* set the value.
*/
@JsonProperty("effective_lsn")
private String effectiveLsn;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,11 @@ public class DatabaseInstanceRole {
@JsonProperty("attributes")
private DatabaseInstanceRoleAttributes attributes;

/** The attributes that are applied to the role. */
/**
* The attributes that are applied to the role. This is an output only field that contains the
* value computed from the input field combined with server side defaults. Use the field without
* the effective_ prefix to set the value.
*/
@JsonProperty("effective_attributes")
private DatabaseInstanceRoleAttributes effectiveAttributes;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,18 @@ public class SyncedDatabaseTable {
/**
* The name of the database instance that this table is registered to. This field is always
   * returned, and for tables inside database catalogs is inferred from the database instance
* the catalog.
* the catalog. This is an output only field that contains the value computed from the input field
* combined with server side defaults. Use the field without the effective_ prefix to set the
* value.
*/
@JsonProperty("effective_database_instance_name")
private String effectiveDatabaseInstanceName;

/** The name of the logical database that this table is registered to. */
/**
* The name of the logical database that this table is registered to. This is an output only field
* that contains the value computed from the input field combined with server side defaults. Use
* the field without the effective_ prefix to set the value.
*/
@JsonProperty("effective_logical_database_name")
private String effectiveLogicalDatabaseName;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,7 @@
/** Databricks Error that is returned by all Databricks APIs. */
@Generated
public class DatabricksServiceExceptionWithDetailsProto {
/**
* @pbjson-skip
*/
/** */
@JsonProperty("details")
private Collection<Object> details;

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.postgres;

import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.service.common.lro.LroOptions;
import com.databricks.sdk.support.Generated;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wrapper for interacting with a long-running deleteBranch operation. Provides methods to wait for
 * completion, check status, and access metadata.
 *
 * <p>Not thread-safe: the wrapped {@code operation} state is mutated on each poll without
 * synchronization.
 */
@Generated
public class DeleteBranchOperation {
  private static final Logger LOG = LoggerFactory.getLogger(DeleteBranchOperation.class);

  private final PostgresService impl;
  // Latest known server-side state of the operation; refreshed on each poll.
  private Operation operation;
  private final ObjectMapper objectMapper;

  public DeleteBranchOperation(PostgresService impl, Operation operation) {
    this.impl = impl;
    this.operation = operation;
    this.objectMapper = SerDeUtils.createMapper();
  }

  /**
   * Wait for the operation to complete. Waits indefinitely if no timeout is specified.
   *
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public void waitForCompletion() throws TimeoutException {
    waitForCompletion(Optional.empty());
  }

  /**
   * Wait for the operation to complete.
   *
   * @param options the options for configuring the wait behavior, can be empty for defaults
   * @throws TimeoutException if the operation doesn't complete within the timeout
   * @throws DatabricksException if the operation fails
   */
  public void waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
    Optional<Duration> timeout = options.flatMap(LroOptions::getTimeout);
    long deadline =
        timeout.isPresent()
            ? System.currentTimeMillis() + timeout.get().toMillis()
            : Long.MAX_VALUE;
    String statusMessage = "polling operation...";
    int attempt = 1;

    while (System.currentTimeMillis() < deadline) {
      // Refresh the operation state
      refreshOperation();

      if (operation.getDone() != null && operation.getDone()) {
        // Operation completed, check for success or failure
        if (operation.getError() != null) {
          String errorMsg = "unknown error";
          if (operation.getError().getMessage() != null
              && !operation.getError().getMessage().isEmpty()) {
            errorMsg = operation.getError().getMessage();
          }

          if (operation.getError().getErrorCode() != null) {
            errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
          }

          throw new DatabricksException("Operation failed: " + errorMsg);
        }

        // Operation completed successfully, unmarshal response
        if (operation.getResponse() == null) {
          throw new DatabricksException("Operation completed but no response available");
        }

        try {
          // deleteBranch has an empty (Void) response body; this round-trip only validates that
          // the payload deserializes cleanly.
          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
          objectMapper.treeToValue(responseJson, Void.class);
        } catch (JsonProcessingException e) {
          throw new DatabricksException("Failed to unmarshal response: " + e.getMessage(), e);
        }

        // BUG FIX: return on success. Previously control fell through to the sleep/poll code
        // below, so a successfully completed operation kept polling and eventually surfaced a
        // spurious TimeoutException (or looped forever when no timeout was configured).
        return;
      }

      // Operation still in progress, wait before polling again
      String prefix = String.format("operation=%s", operation.getName());
      int sleep = Math.min(attempt, 10); // sleep 10s max per attempt
      LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep);

      try {
        // Linear backoff capped at 10s, plus up to 1s of jitter to avoid synchronized polling.
        Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
      } catch (InterruptedException e) {
        // Restore the interrupt flag before converting to an unchecked exception.
        Thread.currentThread().interrupt();
        throw new DatabricksException("Current thread was interrupted", e);
      }
      attempt++;
    }

    String timeoutMessage =
        timeout.isPresent()
            ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage)
            : String.format("Operation timed out: %s", statusMessage);
    throw new TimeoutException(timeoutMessage);
  }

  /**
   * Get the operation name.
   *
   * @return the operation name
   */
  public String getName() {
    return operation.getName();
  }

  /**
   * Get the operation metadata.
   *
   * @return the operation metadata, or null if not available
   * @throws DatabricksException if the metadata cannot be deserialized
   */
  public BranchOperationMetadata getMetadata() {
    if (operation.getMetadata() == null) {
      return null;
    }

    try {
      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
      return objectMapper.treeToValue(metadataJson, BranchOperationMetadata.class);
    } catch (JsonProcessingException e) {
      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
    }
  }

  /**
   * Check if the operation is done. This method refreshes the operation state before checking.
   *
   * @return true if the operation is complete, false otherwise
   * @throws DatabricksException if the status check fails
   */
  public boolean isDone() {
    refreshOperation();
    return operation.getDone() != null && operation.getDone();
  }

  /** Refresh the operation state by polling the server. */
  private void refreshOperation() {
    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
  }
}
Loading
Loading