Skip to content

Commit 0940133

Browse files
committed
resolved merge conflicts
2 parents 421476f + d76de89 commit 0940133

27 files changed

+527
-197
lines changed

pom.xml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636

3737
<spotless.version>2.43.0</spotless.version>
3838
<commons-io.version>2.13.0</commons-io.version>
39-
<databricks-sdk.version>0.17.0</databricks-sdk.version>
39+
<databricks-sdk.version>0.26.0</databricks-sdk.version>
4040
<maven-surefire-plugin.version>3.1.2</maven-surefire-plugin.version>
4141
<sql-logic-test.version>0.3</sql-logic-test.version>
4242
<lz4-compression.version>1.8.0</lz4-compression.version>
@@ -67,6 +67,11 @@
6767
<artifactId>arrow-vector</artifactId>
6868
<version>${arrow.version}</version>
6969
</dependency>
70+
<dependency>
71+
<groupId>org.slf4j</groupId>
72+
<artifactId>slf4j-reload4j</artifactId>
73+
<version>2.0.13</version>
74+
</dependency>
7075
<dependency>
7176
<groupId>com.diffplug.spotless</groupId>
7277
<artifactId>spotless-maven-plugin</artifactId>

src/main/java/com/databricks/jdbc/client/http/DatabricksHttpClient.java

Lines changed: 39 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
package com.databricks.jdbc.client.http;
22

33
import static com.databricks.jdbc.client.http.RetryHandler.*;
4-
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.FAKE_SERVICE_URI_PROP_SUFFIX;
5-
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.IS_FAKE_SERVICE_TEST_PROP;
4+
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.*;
65
import static io.netty.util.NetUtil.LOCALHOST;
76

87
import com.databricks.jdbc.client.DatabricksHttpException;
@@ -12,7 +11,10 @@
1211
import com.databricks.jdbc.commons.util.HttpExecuteExceptionUtil;
1312
import com.databricks.jdbc.commons.util.LoggingUtil;
1413
import com.databricks.jdbc.driver.IDatabricksConnectionContext;
14+
import com.databricks.sdk.core.DatabricksConfig;
15+
import com.databricks.sdk.core.ProxyConfig;
1516
import com.databricks.sdk.core.UserAgent;
17+
import com.databricks.sdk.core.utils.ProxyUtils;
1618
import com.google.common.annotations.VisibleForTesting;
1719
import java.io.IOException;
1820
import java.security.KeyManagementException;
@@ -26,20 +28,14 @@
2628
import org.apache.http.HttpException;
2729
import org.apache.http.HttpHost;
2830
import org.apache.http.HttpResponse;
29-
import org.apache.http.HttpResponseInterceptor;
30-
import org.apache.http.auth.AuthScope;
31-
import org.apache.http.auth.UsernamePasswordCredentials;
32-
import org.apache.http.client.CredentialsProvider;
3331
import org.apache.http.client.config.RequestConfig;
3432
import org.apache.http.client.methods.CloseableHttpResponse;
3533
import org.apache.http.client.methods.HttpUriRequest;
3634
import org.apache.http.conn.UnsupportedSchemeException;
3735
import org.apache.http.conn.routing.HttpRoute;
3836
import org.apache.http.conn.ssl.NoopHostnameVerifier;
39-
import org.apache.http.impl.client.BasicCredentialsProvider;
4037
import org.apache.http.impl.client.CloseableHttpClient;
4138
import org.apache.http.impl.client.HttpClientBuilder;
42-
import org.apache.http.impl.client.ProxyAuthenticationStrategy;
4339
import org.apache.http.impl.conn.DefaultSchemePortResolver;
4440
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
4541
import org.apache.http.protocol.HttpContext;
@@ -147,16 +143,12 @@ private CloseableHttpClient makeClosableHttpClient(
147143
.setConnectionManager(connectionManager)
148144
.setUserAgent(getUserAgent())
149145
.setDefaultRequestConfig(makeRequestConfig())
150-
.setRetryHandler(
151-
(exception, executionCount, context) ->
152-
handleRetry(exception, executionCount, context))
153-
.addInterceptorFirst(
154-
(HttpResponseInterceptor)
155-
(httpResponse, httpContext) ->
156-
handleResponseInterceptor(httpResponse, httpContext));
157-
158-
configureProxy(connectionContext, builder);
159-
146+
.setRetryHandler(this::handleRetry)
147+
.addInterceptorFirst(this::handleResponseInterceptor);
148+
setupProxy(connectionContext, builder);
149+
if (Boolean.parseBoolean(System.getProperty(IS_FAKE_SERVICE_TEST_PROP))) {
150+
setFakeServiceRouteInHttpClient(builder);
151+
}
160152
return builder.build();
161153
}
162154

@@ -266,51 +258,39 @@ private void initializeRetryCounts(HttpContext httpContext) {
266258
}
267259
}
268260

269-
private void configureProxy(
261+
@VisibleForTesting
262+
public static void setupProxy(
270263
IDatabricksConnectionContext connectionContext, HttpClientBuilder builder) {
271-
if (connectionContext.getUseSystemProxy()) {
272-
builder.useSystemProperties();
273-
}
274-
// Override system proxy if proxy details are explicitly provided
275-
// If cloud fetch proxy is provided use that, else use the regular proxy
264+
String proxyHost = null;
265+
Integer proxyPort = null;
266+
String proxyUser = null;
267+
String proxyPassword = null;
268+
ProxyConfig.ProxyAuthType proxyAuth = connectionContext.getProxyAuthType();
269+
// System proxy is handled by the SDK.
270+
// If proxy details are explicitly provided use those for the connection.
276271
if (connectionContext.getUseCloudFetchProxy()) {
277-
setProxyDetailsInHttpClient(
278-
builder,
279-
connectionContext.getCloudFetchProxyHost(),
280-
connectionContext.getCloudFetchProxyPort(),
281-
connectionContext.getUseCloudFetchProxyAuth(),
282-
connectionContext.getCloudFetchProxyUser(),
283-
connectionContext.getCloudFetchProxyPassword());
272+
proxyHost = connectionContext.getCloudFetchProxyHost();
273+
proxyPort = connectionContext.getCloudFetchProxyPort();
274+
proxyUser = connectionContext.getCloudFetchProxyUser();
275+
proxyPassword = connectionContext.getCloudFetchProxyPassword();
276+
proxyAuth = connectionContext.getCloudFetchProxyAuthType();
284277
} else if (connectionContext.getUseProxy()) {
285-
setProxyDetailsInHttpClient(
286-
builder,
287-
connectionContext.getProxyHost(),
288-
connectionContext.getProxyPort(),
289-
connectionContext.getUseProxyAuth(),
290-
connectionContext.getProxyUser(),
291-
connectionContext.getProxyPassword());
292-
} else if (Boolean.parseBoolean(System.getProperty(IS_FAKE_SERVICE_TEST_PROP))) {
293-
setFakeServiceRouteInHttpClient(builder);
278+
proxyHost = connectionContext.getProxyHost();
279+
proxyPort = connectionContext.getProxyPort();
280+
proxyUser = connectionContext.getProxyUser();
281+
proxyPassword = connectionContext.getProxyPassword();
282+
proxyAuth = connectionContext.getProxyAuthType();
294283
}
295-
}
296-
297-
@VisibleForTesting
298-
public static void setProxyDetailsInHttpClient(
299-
HttpClientBuilder builder,
300-
String proxyHost,
301-
int proxyPort,
302-
Boolean useProxyAuth,
303-
String proxyUser,
304-
String proxyPassword) {
305-
builder.setProxy(new HttpHost(proxyHost, proxyPort));
306-
if (useProxyAuth) {
307-
CredentialsProvider credsProvider = new BasicCredentialsProvider();
308-
credsProvider.setCredentials(
309-
new AuthScope(proxyHost, proxyPort),
310-
new UsernamePasswordCredentials(proxyUser, proxyPassword));
311-
builder
312-
.setDefaultCredentialsProvider(credsProvider)
313-
.setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy());
284+
if (proxyHost != null || connectionContext.getUseSystemProxy()) {
285+
ProxyConfig proxyConfig =
286+
new ProxyConfig(new DatabricksConfig())
287+
.setUseSystemProperties(connectionContext.getUseSystemProxy())
288+
.setHost(proxyHost)
289+
.setPort(proxyPort)
290+
.setUsername(proxyUser)
291+
.setPassword(proxyPassword)
292+
.setProxyAuthType(proxyAuth);
293+
ProxyUtils.setupProxy(proxyConfig, builder);
314294
}
315295
}
316296

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
package com.databricks.jdbc.client.impl.helper;
2+
3+
import com.databricks.jdbc.core.DatabricksParsingException;
4+
import com.databricks.jdbc.driver.IDatabricksConnectionContext;
5+
import com.databricks.sdk.core.DatabricksConfig;
6+
7+
public class ClientUtils {
8+
public static DatabricksConfig generateDatabricksConfig(
9+
IDatabricksConnectionContext connectionContext) throws DatabricksParsingException {
10+
DatabricksConfig databricksConfig =
11+
new DatabricksConfig()
12+
.setHost(connectionContext.getHostUrl())
13+
.setToken(connectionContext.getToken())
14+
.setUseSystemPropertiesHttp(connectionContext.getUseSystemProxy());
15+
// Setup proxy settings
16+
if (connectionContext.getUseProxy()) {
17+
databricksConfig
18+
.setProxyHost(connectionContext.getProxyHost())
19+
.setProxyPort(connectionContext.getProxyPort());
20+
}
21+
databricksConfig
22+
.setProxyAuthType(connectionContext.getProxyAuthType())
23+
.setProxyUsername(connectionContext.getProxyUser())
24+
.setProxyPassword(connectionContext.getProxyPassword());
25+
return databricksConfig;
26+
}
27+
}

src/main/java/com/databricks/jdbc/client/impl/helper/MetadataResultConstants.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,7 @@ public class MetadataResultConstants {
251251
IS_AUTO_INCREMENT_COLUMN,
252252
USER_DATA_TYPE_COLUMN,
253253
IS_GENERATED_COLUMN);
254-
public static String NULL_STRING = "null";
254+
public static String NULL_STRING = "NULL";
255255

256256
public static List<ResultColumn> TYPE_INFO_COLUMNS =
257257
List.of(

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksSdkClient.java

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55

66
import com.databricks.jdbc.client.DatabricksClient;
77
import com.databricks.jdbc.client.StatementType;
8+
import com.databricks.jdbc.client.impl.helper.ClientUtils;
89
import com.databricks.jdbc.client.sqlexec.*;
910
import com.databricks.jdbc.client.sqlexec.CloseStatementRequest;
1011
import com.databricks.jdbc.client.sqlexec.CreateSessionRequest;
@@ -25,6 +26,7 @@
2526
import com.databricks.sdk.core.ApiClient;
2627
import com.databricks.sdk.core.DatabricksConfig;
2728
import com.databricks.sdk.service.sql.*;
29+
import com.google.common.annotations.VisibleForTesting;
2830
import java.sql.SQLException;
2931
import java.time.Instant;
3032
import java.util.Collection;
@@ -36,8 +38,6 @@
3638
/** Implementation of DatabricksClient interface using Databricks Java SDK. */
3739
public class DatabricksSdkClient implements DatabricksClient {
3840
private static final String SYNC_TIMEOUT_VALUE = "10s";
39-
private static final String ASYNC_TIMEOUT_VALUE = "0s";
40-
4141
private final IDatabricksConnectionContext connectionContext;
4242
private final DatabricksConfig databricksConfig;
4343
private final WorkspaceClient workspaceClient;
@@ -51,17 +51,12 @@ private static Map<String, String> getHeaders() {
5151
public DatabricksSdkClient(IDatabricksConnectionContext connectionContext)
5252
throws DatabricksParsingException {
5353
this.connectionContext = connectionContext;
54-
// TODO: [PECO-1486] pass on proxy settings to SDK once changes are merged in SDK
55-
// Handle more auth types
56-
this.databricksConfig =
57-
new DatabricksConfig()
58-
.setHost(connectionContext.getHostUrl())
59-
.setToken(connectionContext.getToken());
60-
54+
this.databricksConfig = ClientUtils.generateDatabricksConfig(connectionContext);
6155
OAuthAuthenticator authenticator = new OAuthAuthenticator(connectionContext);
62-
this.workspaceClient = authenticator.getWorkspaceClient();
56+
this.workspaceClient = authenticator.getWorkspaceClient(this.databricksConfig);
6357
}
6458

59+
@VisibleForTesting
6560
public DatabricksSdkClient(
6661
IDatabricksConnectionContext connectionContext,
6762
StatementExecutionService statementExecutionService,

src/main/java/com/databricks/jdbc/client/impl/thrift/commons/DatabricksThriftAccessor.java

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
import com.databricks.jdbc.client.DatabricksHttpException;
88
import com.databricks.jdbc.client.StatementType;
99
import com.databricks.jdbc.client.http.DatabricksHttpClient;
10+
import com.databricks.jdbc.client.impl.helper.ClientUtils;
1011
import com.databricks.jdbc.client.impl.thrift.generated.*;
1112
import com.databricks.jdbc.commons.CommandName;
1213
import com.databricks.jdbc.commons.LogLevel;
@@ -33,7 +34,9 @@ public class DatabricksThriftAccessor {
3334
public DatabricksThriftAccessor(IDatabricksConnectionContext connectionContext)
3435
throws DatabricksParsingException {
3536
enableDirectResults = connectionContext.getDirectResultMode();
36-
this.databricksConfig = new OAuthAuthenticator(connectionContext).getDatabricksConfig();
37+
this.databricksConfig = ClientUtils.generateDatabricksConfig(connectionContext);
38+
OAuthAuthenticator authenticator = new OAuthAuthenticator(connectionContext);
39+
authenticator.setupDatabricksConfig(databricksConfig);
3740
this.databricksConfig.resolve();
3841
Map<String, String> authHeaders = databricksConfig.authenticate();
3942
String endPointUrl = connectionContext.getEndpointURL();
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
package com.databricks.jdbc.commons.util;

import static com.databricks.jdbc.client.impl.helper.MetadataResultConstants.NULL_STRING;

import com.databricks.jdbc.core.DatabricksValidationException;
import com.databricks.jdbc.core.ImmutableSqlParameter;
import java.util.Map;

/** Utility for inlining parameter values into a SQL string containing '?' placeholders. */
public class SQLInterpolator {

  /** Doubles single quotes so the value can be embedded safely in a SQL string literal. */
  private static String escapeApostrophes(String input) {
    if (input == null) return null;
    return input.replace("'", "''");
  }

  /** Renders a parameter as a SQL literal: NULL, a quoted/escaped string, or raw toString(). */
  private static String formatObject(ImmutableSqlParameter object) {
    if (object == null || object.value() == null) {
      return NULL_STRING;
    } else if (object.value() instanceof String) {
      return "'" + escapeApostrophes((String) object.value()) + "'";
    } else {
      return object.value().toString();
    }
  }

  /**
   * Counts '?' characters in the SQL text.
   *
   * <p>NOTE(review): this also counts '?' occurring inside quoted literals — assumes callers never
   * pass such SQL; TODO confirm.
   */
  private static int countPlaceholders(String sql) {
    int count = 0;
    for (char c : sql.toCharArray()) {
      if (c == '?') {
        count++;
      }
    }
    return count;
  }

  /**
   * Interpolates the given SQL string by replacing placeholders with the provided parameters.
   *
   * <p>This method splits the SQL string by placeholders (question marks) and replaces each
   * placeholder with the corresponding parameter from the provided map. The map keys are 1-based
   * indexes, aligning with the SQL parameter positions.
   *
   * @param sql the SQL string containing placeholders ('?') to be replaced.
   * @param params a map of parameters where the key is the 1-based index of the placeholder in the
   *     SQL string, and the value is the corresponding {@link ImmutableSqlParameter}.
   * @return the interpolated SQL string with placeholders replaced by the corresponding parameters.
   * @throws DatabricksValidationException if the number of placeholders in the SQL string does not
   *     match the number of parameters provided in the map.
   */
  public static String interpolateSQL(String sql, Map<Integer, ImmutableSqlParameter> params)
      throws DatabricksValidationException {
    // Split with limit -1 so trailing empty segments are preserved. Without it,
    // SQL ending in placeholder(s) whose tail segments are empty (e.g. "a??" or
    // a lone "?") would yield fewer parts than placeholders and the final
    // parameter(s) would be silently dropped from the output.
    String[] parts = sql.split("\\?", -1);
    if (countPlaceholders(sql) != params.size()) {
      throw new DatabricksValidationException(
          "Parameter count does not match. Provide equal number of parameters as placeholders. SQL "
              + sql);
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < parts.length; i++) {
      sb.append(parts[i]);
      if (i < params.size()) {
        sb.append(formatObject(params.get(i + 1))); // because we have 1 based index in params
      }
    }
    return sb.toString();
  }
}

src/main/java/com/databricks/jdbc/core/ArrowToJavaObjectConverter.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@ public static Object convert(Object object, ColumnInfoTypeName requiredType)
7171
// Struct and Array are present in Arrow data in the VARCHAR ValueVector format
7272
case STRUCT:
7373
case ARRAY:
74+
case MAP:
7475
return convertToString(object);
7576
case DATE:
7677
return convertToDate(object);

src/main/java/com/databricks/jdbc/core/DatabricksConnection.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
import java.util.stream.Collectors;
1919

2020
/** Implementation for Databricks specific connection. */
21-
public class DatabricksConnection implements IDatabricksConnection, Connection, AutoCloseable {
21+
public class DatabricksConnection implements IDatabricksConnection, Connection {
2222
private IDatabricksSession session;
2323
private final Set<IDatabricksStatement> statementSet = ConcurrentHashMap.newKeySet();
2424
private SQLWarning warnings = null;

0 commit comments

Comments
 (0)