
Commit da8b518

Merge pull request #176 from agnipratimnag-db/agnipratim_peco-1642
[PECO-1642] Added configurability of Polling Interval
2 parents: 5fc4a26 + 530c889

File tree

5 files changed: +29 additions, -2 deletions

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksSdkClient.java

Lines changed: 1 addition & 2 deletions
@@ -37,7 +37,6 @@ public class DatabricksSdkClient implements DatabricksClient {
   private static final Logger LOGGER = LoggerFactory.getLogger(DatabricksSdkClient.class);
   private static final String SYNC_TIMEOUT_VALUE = "10s";
   private static final String ASYNC_TIMEOUT_VALUE = "0s";
-  private static final int STATEMENT_RESULT_POLL_INTERVAL_MILLIS = 200;

   private final IDatabricksConnectionContext connectionContext;
   private final DatabricksConfig databricksConfig;

@@ -160,7 +159,7 @@ public DatabricksResultSet executeStatement(
     while (responseState == StatementState.PENDING || responseState == StatementState.RUNNING) {
       if (pollCount > 0) { // First poll happens without a delay
         try {
-          Thread.sleep(STATEMENT_RESULT_POLL_INTERVAL_MILLIS); // TODO: make this configurable
+          Thread.sleep(this.connectionContext.getAsyncExecPollInterval());
         } catch (InterruptedException e) {
           throw new DatabricksTimeoutException("Thread interrupted due to statement timeout");
         }
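
The only behavioural change here is that the hard-coded 200 ms sleep between status polls is replaced by whatever interval the connection context reports. A minimal, self-contained sketch of that polling loop follows; StatementState, pollState(), and the local getAsyncExecPollInterval() are illustrative stand-ins, not the driver's real API.

import java.util.concurrent.ThreadLocalRandom;

public class PollingSketch {

  enum StatementState { PENDING, RUNNING, SUCCEEDED }

  // Stand-in for IDatabricksConnectionContext#getAsyncExecPollInterval();
  // in the driver this comes from the parsed connection URL (default 200 ms).
  static int getAsyncExecPollInterval() {
    return 500;
  }

  // Stand-in for asking the server for the current statement state.
  static StatementState pollState() {
    return ThreadLocalRandom.current().nextInt(10) == 0
        ? StatementState.SUCCEEDED
        : StatementState.RUNNING;
  }

  public static void main(String[] args) throws InterruptedException {
    StatementState state = pollState();
    int pollCount = 0;
    while (state == StatementState.PENDING || state == StatementState.RUNNING) {
      if (pollCount > 0) { // first poll happens without a delay
        Thread.sleep(getAsyncExecPollInterval());
      }
      state = pollState();
      pollCount++;
    }
    System.out.println("Statement finished after " + pollCount + " polls");
  }
}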

src/main/java/com/databricks/jdbc/driver/DatabricksConnectionContext.java

Lines changed: 7 additions & 0 deletions
@@ -172,6 +172,13 @@ public String getToken() {
         : getParameter(DatabricksJdbcConstants.PWD);
   }

+  @Override
+  public int getAsyncExecPollInterval() {
+    return getParameter(POLL_INTERVAL) == null
+        ? POLL_INTERVAL_DEFAULT
+        : Integer.parseInt(getParameter(DatabricksJdbcConstants.POLL_INTERVAL));
+  }
+
   public String getCloud() throws DatabricksParsingException {
     String hostURL = getHostUrl();
     if (hostURL.contains("azuredatabricks.net")
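
The new getter is a plain "use the default unless the URL supplies a value" lookup. A stripped-down sketch of the same shape, assuming a simple Map in place of the driver's parsed connection parameters:

import java.util.Map;

public class PollIntervalLookupSketch {

  static final String POLL_INTERVAL = "asyncexecpollinterval";
  static final int POLL_INTERVAL_DEFAULT = 200;

  // Mirrors the null-check-then-parse shape of getAsyncExecPollInterval();
  // the Map stands in for the driver's parsed URL/property parameters.
  static int getAsyncExecPollInterval(Map<String, String> parameters) {
    String value = parameters.get(POLL_INTERVAL);
    return value == null ? POLL_INTERVAL_DEFAULT : Integer.parseInt(value);
  }

  public static void main(String[] args) {
    System.out.println(getAsyncExecPollInterval(Map.of()));                     // 200 (default)
    System.out.println(getAsyncExecPollInterval(Map.of(POLL_INTERVAL, "500"))); // 500
  }
}

As in the real getter, a non-numeric value would surface as a NumberFormatException from Integer.parseInt.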

src/main/java/com/databricks/jdbc/driver/DatabricksJdbcConstants.java

Lines changed: 3 additions & 0 deletions
@@ -58,6 +58,9 @@ public final class DatabricksJdbcConstants {
   static final String UID = "uid";
   static final String PWD = "pwd";

+  static final String POLL_INTERVAL = "asyncexecpollinterval";
+  static final int POLL_INTERVAL_DEFAULT = 200;
+
   static final String AWS_CLIENT_ID = "databricks-sql-jdbc";

   static final String AAD_CLIENT_ID = "96eecda7-19ea-49cc-abb5-240097d554f5";
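
With the asyncexecpollinterval key and its 200 ms default in place, the interval can be tuned straight from the connection URL. A hedged usage sketch follows; the workspace host, warehouse path, and credential values are placeholders, and the credential property keys are only illustrative rather than taken from this change.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class PollIntervalUsageSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder workspace host and warehouse path; asyncexecpollinterval=500
    // asks the driver to wait 500 ms between statement status polls.
    String url =
        "jdbc:databricks://example-workspace.cloud.databricks.com:443/default;"
            + "transportMode=http;ssl=1;AuthMech=3;"
            + "httpPath=/sql/1.0/warehouses/abcdef1234567890;"
            + "asyncexecpollinterval=500";

    // Illustrative credential properties; consult the driver documentation
    // for the exact keys your setup expects.
    Properties info = new Properties();
    info.put("uid", "token");
    info.put("pwd", "<personal-access-token>");

    try (Connection connection = DriverManager.getConnection(url, info)) {
      System.out.println("Connected; statement polls every 500 ms");
    }
  }
}

Omitting the parameter keeps the previous behaviour, since POLL_INTERVAL_DEFAULT is 200 ms.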

src/main/java/com/databricks/jdbc/driver/IDatabricksConnectionContext.java

Lines changed: 2 additions & 0 deletions
@@ -114,5 +114,7 @@ public static AuthMech parseAuthMech(String authMech) {

   String getEndpointURL() throws DatabricksParsingException;

+  int getAsyncExecPollInterval();
+
   DatabricksClientType getClientType();
 }

src/test/java/com/databricks/jdbc/driver/DatabricksConnectionContextTest.java

Lines changed: 16 additions & 0 deletions
@@ -52,6 +52,10 @@ class DatabricksConnectionContextTest {
       "jdbc:databricks://e2-dogfood.staging.cloud.databricks.com:443/default;transportMode=http;ssl=1;AuthMech=3;httpPath=/sql/1.0/warehouses/5c89f447c476a5a8;UseSystemProxy=1;UseProxy=1;ProxyHost=127.0.0.1;ProxyPort=8080;ProxyAuth=1;ProxyUID=proxyUser;ProxyPwd=proxyPassword;UseCFProxy=1;CFProxyHost=127.0.1.2;CFProxyPort=8081;CFProxyAuth=1;CFProxyUID=cfProxyUser;CFProxyPwd=cfProxyPassword;";

   private static Properties properties = new Properties();
+
+  private static final String VALID_URL_POLLING =
+      "jdbc:databricks://e2-dogfood.staging.cloud.databricks.com:4473;ssl=1;asyncexecpollinterval=500;AuthMech=3;httpPath=/sql/1.0/warehouses/5c89f447c476a5a8;QueryResultCompressionType=1";
+
   private static Properties properties_with_pwd = new Properties();

   @BeforeAll

@@ -246,6 +250,18 @@ public void testParsingOfUrlWithoutDefault() throws DatabricksSQLException {
     assertEquals("INFO", connectionContext.getLogLevelString());
   }

+  @Test
+  public void testPollingInterval() throws DatabricksSQLException {
+    DatabricksConnectionContext connectionContext =
+        (DatabricksConnectionContext) DatabricksConnectionContext.parse(VALID_URL_5, properties);
+    assertEquals(200, connectionContext.getAsyncExecPollInterval());
+
+    DatabricksConnectionContext connectionContextWithPoll =
+        (DatabricksConnectionContext)
+            DatabricksConnectionContext.parse(VALID_URL_POLLING, properties);
+    assertEquals(500, connectionContextWithPoll.getAsyncExecPollInterval());
+  }
+
   @Test
   public void testParsingOfUrlWithProxy() throws DatabricksSQLException {
     IDatabricksConnectionContext connectionContext =
