Skip to content

Commit 5b6437e

Browse files
[PECOBLR-322] Update SDK to the latest version (0.43.0) (#781)
1 parent 082439a commit 5b6437e

File tree

12 files changed

+231
-148
lines changed

12 files changed

+231
-148
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
 .idea/
+.vscode/
 target/
 *.iml
 *DS_Store

pom.xml

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,9 @@
     <google.findbugs.annotations.version>3.0.1</google.findbugs.annotations.version>
     <immutables.value.version>2.9.2</immutables.value.version>
     <httpclient.version>4.5.14</httpclient.version>
+    <commons-configuration.version>2.10.1</commons-configuration.version>
     <commons-io.version>2.14.0</commons-io.version>
-    <databricks-sdk.version>0.37.0</databricks-sdk.version>
+    <databricks-sdk.version>0.43.0</databricks-sdk.version>
     <maven-surefire-plugin.version>3.1.2</maven-surefire-plugin.version>
     <sql-logic-test.version>0.3</sql-logic-test.version>
     <lz4-compression.version>1.8.0</lz4-compression.version>
@@ -72,6 +73,11 @@
       <artifactId>databricks-sdk-java</artifactId>
       <version>${databricks-sdk.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-configuration2</artifactId>
+      <version>${commons-configuration.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.arrow</groupId>
       <artifactId>arrow-memory-core</artifactId>

src/main/java/com/databricks/jdbc/api/impl/DatabricksConnectionContext.java

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -687,12 +687,10 @@ public List<Integer> getOAuth2RedirectUrlPorts() {
           .map(Integer::parseInt)
           .collect(Collectors.toList());
     } catch (NumberFormatException e) {
-      LOGGER.warn(
-          "Invalid port format in OAuth2RedirectUrlPort: {}. Using default port {}.",
-          portsStr,
-          DatabricksJdbcUrlParams.OAUTH_REDIRECT_URL_PORT.getDefaultValue());
-      return List.of(
-          Integer.parseInt(DatabricksJdbcUrlParams.OAUTH_REDIRECT_URL_PORT.getDefaultValue()));
+      String errorMessage =
+          String.format("Invalid port format in OAuth2RedirectUrlPort: %s.", portsStr);
+      LOGGER.error(errorMessage, e);
+      throw new IllegalArgumentException(errorMessage);
     }
   }

src/main/java/com/databricks/jdbc/api/impl/volume/DBFSVolumeClient.java

Lines changed: 23 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,10 @@
 import com.databricks.jdbc.model.client.filesystem.*;
 import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode;
 import com.databricks.sdk.WorkspaceClient;
+import com.databricks.sdk.core.ApiClient;
 import com.databricks.sdk.core.DatabricksException;
 import com.databricks.sdk.core.error.platform.NotFound;
+import com.databricks.sdk.core.http.Request;
 import com.google.common.annotations.VisibleForTesting;
 import java.io.Closeable;
 import java.io.IOException;
@@ -44,19 +46,22 @@ public class DBFSVolumeClient implements IDatabricksVolumeClient, Closeable {
   private VolumeInputStream volumeInputStream = null;
   private long volumeStreamContentLength = -1L;
   final WorkspaceClient workspaceClient;
+  final ApiClient apiClient;
   private final String allowedVolumeIngestionPaths;

   @VisibleForTesting
   public DBFSVolumeClient(WorkspaceClient workspaceClient) {
     this.connectionContext = null;
     this.workspaceClient = workspaceClient;
+    this.apiClient = workspaceClient.apiClient();
     this.databricksHttpClient = null;
     this.allowedVolumeIngestionPaths = "";
   }

   public DBFSVolumeClient(IDatabricksConnectionContext connectionContext) {
     this.connectionContext = connectionContext;
     this.workspaceClient = getWorkspaceClientFromConnectionContext(connectionContext);
+    this.apiClient = workspaceClient.apiClient();
     this.databricksHttpClient =
         DatabricksHttpClientFactory.getInstance()
             .getClient(connectionContext, HttpClientType.VOLUME);
@@ -403,10 +408,10 @@ CreateUploadUrlResponse getCreateUploadUrlResponse(String objectPath)

     CreateUploadUrlRequest request = new CreateUploadUrlRequest(objectPath);
     try {
-      return workspaceClient
-          .apiClient()
-          .POST(CREATE_UPLOAD_URL_PATH, request, CreateUploadUrlResponse.class, JSON_HTTP_HEADERS);
-    } catch (DatabricksException e) {
+      Request req = new Request(Request.POST, CREATE_UPLOAD_URL_PATH, apiClient.serialize(request));
+      req.withHeaders(JSON_HTTP_HEADERS);
+      return apiClient.execute(req, CreateUploadUrlResponse.class);
+    } catch (IOException | DatabricksException e) {
       String errorMessage =
           String.format("Failed to get create upload url response - {%s}", e.getMessage());
       LOGGER.error(e, errorMessage);
@@ -426,14 +431,11 @@ CreateDownloadUrlResponse getCreateDownloadUrlResponse(String objectPath)
     CreateDownloadUrlRequest request = new CreateDownloadUrlRequest(objectPath);

     try {
-      return workspaceClient
-          .apiClient()
-          .POST(
-              CREATE_DOWNLOAD_URL_PATH,
-              request,
-              CreateDownloadUrlResponse.class,
-              JSON_HTTP_HEADERS);
-    } catch (DatabricksException e) {
+      Request req =
+          new Request(Request.POST, CREATE_DOWNLOAD_URL_PATH, apiClient.serialize(request));
+      req.withHeaders(JSON_HTTP_HEADERS);
+      return apiClient.execute(req, CreateDownloadUrlResponse.class);
+    } catch (IOException | DatabricksException e) {
       String errorMessage =
           String.format("Failed to get create download url response - {%s}", e.getMessage());
       LOGGER.error(e, errorMessage);
@@ -452,10 +454,10 @@ CreateDeleteUrlResponse getCreateDeleteUrlResponse(String objectPath)
     CreateDeleteUrlRequest request = new CreateDeleteUrlRequest(objectPath);

     try {
-      return workspaceClient
-          .apiClient()
-          .POST(CREATE_DELETE_URL_PATH, request, CreateDeleteUrlResponse.class, JSON_HTTP_HEADERS);
-    } catch (DatabricksException e) {
+      Request req = new Request(Request.POST, CREATE_DELETE_URL_PATH, apiClient.serialize(request));
+      req.withHeaders(JSON_HTTP_HEADERS);
+      return apiClient.execute(req, CreateDeleteUrlResponse.class);
+    } catch (IOException | DatabricksException e) {
       String errorMessage =
           String.format("Failed to get create delete url response - {%s}", e.getMessage());
       LOGGER.error(e, errorMessage);
@@ -470,10 +472,11 @@ ListResponse getListResponse(String listPath) throws DatabricksVolumeOperationEx
         String.format("Entering getListResponse method with parameters : listPath={%s}", listPath));
     ListRequest request = new ListRequest(listPath);
     try {
-      return workspaceClient
-          .apiClient()
-          .GET(LIST_PATH, request, ListResponse.class, JSON_HTTP_HEADERS);
-    } catch (DatabricksException e) {
+      Request req = new Request(Request.GET, LIST_PATH);
+      req.withHeaders(JSON_HTTP_HEADERS);
+      ApiClient.setQuery(req, request);
+      return apiClient.execute(req, ListResponse.class);
+    } catch (IOException | DatabricksException e) {
       String errorMessage = String.format("Failed to get list response - {%s}", e.getMessage());
       LOGGER.error(e, errorMessage);
       throw new DatabricksVolumeOperationException(

src/main/java/com/databricks/jdbc/dbclient/impl/common/ClientConfigurator.java

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@ public void setupU2MConfig() throws DatabricksParsingException {
         .setClientSecret(connectionContext.getClientSecret())
         .setOAuthRedirectUrl(redirectUrl);

-    LOGGER.info("Using OAuth redirect URL: {}", redirectUrl);
+    LOGGER.info("Using OAuth redirect URL: %s", redirectUrl);

     if (!databricksConfig.isAzure()) {
       databricksConfig.setScopes(connectionContext.getOAuthScopesForU2M());
@@ -171,25 +171,23 @@ int findAvailablePort(List<Integer> initialPorts) {
         portsToTry.add(startPort + i);
       }
       LOGGER.debug(
-          "Single port provided ({}), will try ports {} through {}",
-          startPort,
-          startPort,
-          startPort + maxAttempts - 1);
+          "Single port provided (%s), will try ports %s through %s",
+          startPort, startPort, startPort + maxAttempts - 1);
     } else {
       portsToTry = initialPorts;
-      LOGGER.debug("Multiple ports provided, will try: {}", portsToTry);
+      LOGGER.debug("Multiple ports provided, will try: %s", portsToTry);
    }

    // Try each port in the list
    for (int port : portsToTry) {
      if (isPortAvailable(port)) {
        return port;
      }
-      LOGGER.debug("Port {} is not available, trying next port", port);
+      LOGGER.debug("Port %s is not available, trying next port", port);
    }

    // No available ports found
-    LOGGER.error("No available ports found among: {}", portsToTry);
+    LOGGER.error("No available ports found among: %s", portsToTry);
    throw new DatabricksException(
        "No available port found for OAuth redirect URL. Tried ports: " + portsToTry);
  }

0 commit comments

Comments (0)