Commit 4b3c0ba

Merge commit: 2 parents 1b9be61 + f1e7c3c

275 files changed: +6765 -747 lines changed
.github/workflows/prCheck.yml

Lines changed: 23 additions & 7 deletions
@@ -66,13 +66,29 @@ jobs:
       - name: Check Unit Tests
         run: mvn test
 
+      - name: Install xmllint
+        if: runner.os == 'Linux'
+        run: sudo apt-get update && sudo apt-get install -y libxml2-utils
+
       - name: JaCoCo report
         run: mvn --batch-mode --errors jacoco:report --file pom.xml
 
-      - name: Publish test coverage
-        uses: codecov/codecov-action@v4
-        with:
-          file: target/site/jacoco/jacoco.xml
-          token: ${{ secrets.CODECOV_TOKEN }}
-          fail_ci_if_error: true
-          verbose: true
+      - name: Extract codeCov percentage
+        run: |
+          COVERAGE_FILE="target/site/jacoco/jacoco.xml"
+          COVERED=$(xmllint --xpath "string(//report/counter[@type='INSTRUCTION']/@covered)" $COVERAGE_FILE)
+          MISSED=$(xmllint --xpath "string(//report/counter[@type='INSTRUCTION']/@missed)" $COVERAGE_FILE)
+          TOTAL=$(($COVERED + $MISSED))
+          PERCENTAGE=$(echo "scale=2; 100 * $COVERED / $TOTAL" | bc)
+          echo $PERCENTAGE > coverage.txt
+          echo "::set-output name=coverage::$PERCENTAGE"
+      - name: Check coverage percentage
+        run: |
+          BRANCH_COVERAGE=$(cat coverage.txt)
+          echo "Branch Coverage: $BRANCH_COVERAGE%"
+          if (( $(echo "$BRANCH_COVERAGE < 85" | bc -l) )); then
+            echo "Coverage is less than 85%"
+            exit 1
+          else
+            echo "Coverage is equal to or greater than 85%"
+          fi
pom.xml

Lines changed: 13 additions & 0 deletions
@@ -45,6 +45,7 @@
     <annotation.version>1.3.5</annotation.version>
     <slt.executor>dbsql</slt.executor>
     <slt.token>dummy-token</slt.token>
+    <wiremock.version>3.5.4</wiremock.version>
   </properties>
   <dependencies>
     <dependency>
@@ -168,6 +169,18 @@
       <artifactId>jakarta.annotation-api</artifactId>
       <version>${annotation.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.wiremock</groupId>
+      <artifactId>wiremock</artifactId>
+      <version>${wiremock.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-fileupload</groupId>
+          <artifactId>commons-fileupload</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>
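The new test-scoped WireMock dependency backs the fake-service tests introduced elsewhere in this commit. A minimal, self-contained sketch of typical WireMock usage (the endpoint path and response body are illustrative, not from this repo):

import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;

import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.core.WireMockConfiguration;

public class WireMockSketch {
  public static void main(String[] args) {
    // Start a stub server on a random free port.
    WireMockServer server = new WireMockServer(WireMockConfiguration.options().dynamicPort());
    server.start();
    // Any HTTP client pointed at server.baseUrl() sees this canned response.
    server.stubFor(
        get(urlEqualTo("/api/2.0/ping"))
            .willReturn(aResponse().withStatus(200).withBody("{\"ok\":true}")));
    System.out.println("Fake service running at " + server.baseUrl());
    server.stop();
  }
}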

src/main/java/com/databricks/jdbc/client/http/DatabricksHttpClient.java

Lines changed: 51 additions & 4 deletions
@@ -1,5 +1,8 @@
 package com.databricks.jdbc.client.http;
 
+import static com.databricks.jdbc.driver.DatabricksJdbcConstants.FAKE_SERVICE_URI_PROP_SUFFIX;
+import static com.databricks.jdbc.driver.DatabricksJdbcConstants.IS_FAKE_SERVICE_TEST_PROP;
+
 import com.databricks.jdbc.client.DatabricksHttpException;
 import com.databricks.jdbc.client.IDatabricksHttpClient;
 import com.databricks.jdbc.driver.IDatabricksConnectionContext;
@@ -21,10 +24,13 @@
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.conn.UnsupportedSchemeException;
+import org.apache.http.conn.routing.HttpRoute;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.client.ProxyAuthenticationStrategy;
+import org.apache.http.impl.conn.DefaultSchemePortResolver;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
 import org.slf4j.Logger;
@@ -45,6 +51,8 @@ public class DatabricksHttpClient implements IDatabricksHttpClient {
   private static final int MAX_RETRY_INTERVAL = 10 * 1000; // 10s
   private static final int DEFAULT_RETRY_COUNT = 5;
   private static final String HTTP_GET = "GET";
+  private static final String SDK_USER_AGENT = "databricks-sdk-java";
+  private static final String JDBC_HTTP_USER_AGENT = "databricks-jdbc-http";
   private static final Set<Integer> RETRYABLE_HTTP_CODES = getRetryableHttpCodes();
   protected static final long DEFAULT_IDLE_CONNECTION_TIMEOUT = 5;
 
@@ -153,7 +161,10 @@ public void process(HttpResponse httpResponse, HttpContext httpContext)
           connectionContext.getUseProxyAuth(),
           connectionContext.getProxyUser(),
           connectionContext.getProxyPassword());
+    } else if (Boolean.parseBoolean(System.getProperty(IS_FAKE_SERVICE_TEST_PROP))) {
+      setFakeServiceRouteInHttpClient(builder);
     }
+
     return builder.build();
   }
 
@@ -177,6 +188,27 @@ public static void setProxyDetailsInHttpClient(
     }
   }
 
+  @VisibleForTesting
+  static void setFakeServiceRouteInHttpClient(HttpClientBuilder builder) {
+    builder.setRoutePlanner(
+        (host, request, context) -> {
+          // Get the fake service URI for the target URI and set it as proxy
+          final HttpHost proxy =
+              HttpHost.create(System.getProperty(host.toURI() + FAKE_SERVICE_URI_PROP_SUFFIX));
+          final HttpHost target;
+          try {
+            target =
+                new HttpHost(
+                    host.getHostName(),
+                    DefaultSchemePortResolver.INSTANCE.resolve(host),
+                    host.getSchemeName());
+          } catch (UnsupportedSchemeException e) {
+            throw new HttpException(e.getMessage());
+          }
+          return new HttpRoute(target, null, proxy, false);
+        });
+  }
+
   @VisibleForTesting
   static boolean isRetryAllowed(String method) {
     // For now, allowing retry only for GET which is idempotent
@@ -233,13 +265,28 @@ static String getUserAgent() {
     // Remove the SDK part from user agent
     StringBuilder mergedString = new StringBuilder();
     for (int i = 0; i < parts.length; i++) {
-      if (i != 1) { // Skip the second part
+      if (parts[i].startsWith(SDK_USER_AGENT)) {
+        mergedString.append(JDBC_HTTP_USER_AGENT);
+      } else {
         mergedString.append(parts[i]);
-        if (i != parts.length - 1) {
-          mergedString.append(" "); // Add space between parts
-        }
+      }
+      if (i != parts.length - 1) {
+        mergedString.append(" "); // Add space between parts
       }
     }
     return mergedString.toString();
   }
+
+  /** Reset the instance of the http client. This is used for testing purposes only. */
+  @VisibleForTesting
+  public static synchronized void resetInstance() {
+    if (instance != null) {
+      try {
+        instance.httpClient.close();
+      } catch (IOException e) {
+        LOGGER.error("Caught error while closing http client", e);
+      }
+      instance = null;
+    }
+  }
 }
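The new route planner sends every request through a per-host fake service while preserving the original target host. A self-contained sketch of the same Apache HttpClient 4.x technique (the general pattern, not the driver's exact wiring; the driver resolves the proxy from a per-host system property):

import org.apache.http.HttpException;
import org.apache.http.HttpHost;
import org.apache.http.conn.UnsupportedSchemeException;
import org.apache.http.conn.routing.HttpRoute;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.DefaultSchemePortResolver;

public class RoutePlannerSketch {
  // Build a client that sends all traffic through the given proxy host.
  public static CloseableHttpClient clientRoutedThrough(HttpHost proxy) {
    return HttpClients.custom()
        .setRoutePlanner(
            (host, request, context) -> {
              final HttpHost target;
              try {
                // Fill in the default port (80/443) when the request URI omits it.
                target =
                    new HttpHost(
                        host.getHostName(),
                        DefaultSchemePortResolver.INSTANCE.resolve(host),
                        host.getSchemeName());
              } catch (UnsupportedSchemeException e) {
                throw new HttpException(e.getMessage());
              }
              // false = no tunnelling; the proxy sees the plain request.
              return new HttpRoute(target, null, proxy, false);
            })
        .build();
  }
}

In the driver, the proxy comes from System.getProperty(host.toURI() + FAKE_SERVICE_URI_PROP_SUFFIX), typically pointing at a local stub server such as the WireMock instance added to the test dependencies.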

src/main/java/com/databricks/jdbc/client/impl/helper/CommandConstants.java

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ public class CommandConstants {
   public static final String IN_ABSOLUTE_SCHEMA_SQL = " IN SCHEMA %s";
   public static final String IN_ABSOLUTE_TABLE_SQL = " IN TABLE %s";
   public static final String SHOW_SCHEMA_IN_CATALOG_SQL = "SHOW SCHEMAS IN %s";
-  public static final String LIKE_SQL = " LIKE `%s`";
+  public static final String LIKE_SQL = " LIKE '%s'";
   public static final String SCHEMA_LIKE_SQL = " SCHEMA" + LIKE_SQL;
   public static final String TABLE_LIKE_SQL = " TABLE" + LIKE_SQL;
   public static final String SHOW_TABLES_SQL = "SHOW TABLES" + IN_CATALOG_SQL;
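This one-character change is behavioral, not cosmetic: in Databricks SQL, backticks quote identifiers, while a LIKE pattern is a string literal and needs single quotes, e.g. SHOW SCHEMAS IN main LIKE 'emp%' (catalog and pattern illustrative).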

src/main/java/com/databricks/jdbc/client/impl/helper/MetadataResultConstants.java

Lines changed: 11 additions & 2 deletions
@@ -5,8 +5,12 @@
 import java.util.List;
 
 public class MetadataResultConstants {
+  public static final String[] DEFAULT_TABLE_TYPES = {"TABLE", "VIEW", "SYSTEM TABLE"};
   private static final ResultColumn CATALOG_COLUMN =
       new ResultColumn("TABLE_CAT", "catalogName", Types.VARCHAR);
+
+  private static final ResultColumn CATALOG_COLUMN_FOR_GET_CATALOGS =
+      new ResultColumn("TABLE_CAT", "catalog", Types.VARCHAR);
   private static final ResultColumn TYPE_CATALOG_COLUMN =
       new ResultColumn("TYPE_CAT", "TYPE_CATALOG_COLUMN", Types.VARCHAR);
   private static final ResultColumn TYPE_SCHEMA_COLUMN =
@@ -25,6 +29,10 @@ public class MetadataResultConstants {
       new ResultColumn("TYPE_NAME", "TYPE_NAME", Types.VARCHAR);
   private static final ResultColumn SCHEMA_COLUMN =
       new ResultColumn("TABLE_SCHEM", "namespace", Types.VARCHAR);
+
+  private static final ResultColumn SCHEMA_COLUMN_FOR_GET_SCHEMA =
+      new ResultColumn("TABLE_SCHEM", "databaseName", Types.VARCHAR);
+
   private static final ResultColumn TABLE_NAME_COLUMN =
       new ResultColumn("TABLE_NAME", "tableName", Types.VARCHAR);
   private static final ResultColumn TABLE_TYPE_COLUMN =
@@ -132,8 +140,9 @@ public class MetadataResultConstants {
           ORDINAL_POSITION_COLUMN,
           IS_AUTO_INCREMENT_COLUMN,
           IS_GENERATED_COLUMN);
-  public static List<ResultColumn> CATALOG_COLUMNS = List.of(CATALOG_COLUMN);
-  public static List<ResultColumn> SCHEMA_COLUMNS = List.of(SCHEMA_COLUMN, CATALOG_COLUMN);
+  public static List<ResultColumn> CATALOG_COLUMNS = List.of(CATALOG_COLUMN_FOR_GET_CATALOGS);
+  public static List<ResultColumn> SCHEMA_COLUMNS =
+      List.of(SCHEMA_COLUMN_FOR_GET_SCHEMA, CATALOG_COLUMN);
   public static List<ResultColumn> TABLE_COLUMNS =
       List.of(
           CATALOG_COLUMN,
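The *_FOR_GET_CATALOGS and *_FOR_GET_SCHEMA variants presumably exist because the server returns different result-set field names per command ("catalog" for SHOW CATALOGS, "databaseName" for SHOW SCHEMAS), while the JDBC-facing column names (TABLE_CAT, TABLE_SCHEM) stay fixed.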

src/main/java/com/databricks/jdbc/client/impl/helper/MetadataResultSetBuilder.java

Lines changed: 47 additions & 6 deletions
@@ -5,6 +5,7 @@
 
 import com.databricks.jdbc.client.StatementType;
 import com.databricks.jdbc.core.DatabricksResultSet;
+import com.databricks.jdbc.core.DatabricksSQLException;
 import com.databricks.sdk.service.sql.StatementState;
 import com.databricks.sdk.service.sql.StatementStatus;
 import java.sql.ResultSet;
@@ -29,13 +30,19 @@ public static DatabricksResultSet getCatalogsResult(ResultSet resultSet) throws SQLException {
     return buildResultSet(CATALOG_COLUMNS, rows, GET_CATALOGS_STATEMENT_ID);
   }
 
-  public static DatabricksResultSet getSchemasResult(ResultSet resultSet) throws SQLException {
-    List<List<Object>> rows = getRows(resultSet, SCHEMA_COLUMNS);
+  public static DatabricksResultSet getSchemasResult(ResultSet resultSet, String catalog)
+      throws SQLException {
+    List<List<Object>> rows = getRowsForSchemas(resultSet, SCHEMA_COLUMNS, catalog);
     return buildResultSet(SCHEMA_COLUMNS, rows, METADATA_STATEMENT_ID);
   }
 
-  public static DatabricksResultSet getTablesResult(ResultSet resultSet) throws SQLException {
-    List<List<Object>> rows = getRows(resultSet, TABLE_COLUMNS);
+  public static DatabricksResultSet getTablesResult(ResultSet resultSet, String[] tableTypes)
+      throws SQLException {
+    List<String> allowedTableTypes = List.of(tableTypes);
+    List<List<Object>> rows =
+        getRows(resultSet, TABLE_COLUMNS).stream()
+            .filter(row -> allowedTableTypes.contains(row.get(3))) // Filtering based on table type
+            .collect(Collectors.toList());
     return buildResultSet(TABLE_COLUMNS, rows, GET_TABLES_STATEMENT_ID);
   }
 
@@ -62,8 +69,42 @@ private static List<List<Object>> getRows(ResultSet resultSet, List<ResultColumn> columns)
     while (resultSet.next()) {
       List<Object> row = new ArrayList<>();
       for (ResultColumn column : columns) {
-        Object object = resultSet.getObject(column.getResultSetColumnName());
-        if (object == null) {
+        Object object;
+        try {
+          object = resultSet.getObject(column.getResultSetColumnName());
+          if (object == null) {
+            object = NULL_STRING;
+          }
+        } catch (DatabricksSQLException e) {
+          // Remove non-relevant columns from the obtained result set
+          object = NULL_STRING;
+        }
+        row.add(object);
+      }
+      rows.add(row);
+    }
+    return rows;
+  }
+
+  private static List<List<Object>> getRowsForSchemas(
+      ResultSet resultSet, List<ResultColumn> columns, String catalog) throws SQLException {
+    // TODO(PECO-1677): Remove this method once the server side ResultSet metadata contains catalogs
+    List<List<Object>> rows = new ArrayList<>();
+    while (resultSet.next()) {
+      List<Object> row = new ArrayList<>();
+      for (ResultColumn column : columns) {
+        if (column.getColumnName().equals("TABLE_CAT")) {
+          row.add(catalog);
+          continue;
+        }
+        Object object;
+        try {
+          object = resultSet.getObject(column.getResultSetColumnName());
+          if (object == null) {
+            object = NULL_STRING;
+          }
+        } catch (DatabricksSQLException e) {
+          // Remove non-relevant columns from the obtained result set
           object = NULL_STRING;
         }
         row.add(object);
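Two behaviors are worth noting in this builder change: getObject is now wrapped in try/catch so a column the server does not return degrades to NULL_STRING instead of failing the whole metadata call, and getRowsForSchemas injects the caller-supplied catalog into TABLE_CAT as a stopgap until the server-side result metadata carries it (the PECO-1677 TODO). In getTablesResult, row.get(3) relies on TABLE_TYPE being the fourth column, matching the standard JDBC getTables column order (TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE).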

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksNewMetadataSdkClient.java

Lines changed: 20 additions & 8 deletions
@@ -1,5 +1,6 @@
 package com.databricks.jdbc.client.impl.sdk;
 
+import static com.databricks.jdbc.client.impl.helper.MetadataResultConstants.DEFAULT_TABLE_TYPES;
 import static com.databricks.jdbc.client.impl.sdk.ResultConstants.TYPE_INFO_RESULT;
 
 import com.databricks.jdbc.client.DatabricksMetadataClient;
@@ -41,7 +42,8 @@ public DatabricksResultSet listCatalogs(IDatabricksSession session) throws SQLException {
     CommandBuilder commandBuilder = new CommandBuilder(session);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_CATALOGS);
     LOGGER.debug("SQL command to fetch catalogs: {}", SQL);
-    return MetadataResultSetBuilder.getCatalogsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getCatalogsResult(
+        getResultSet(SQL, session, StatementType.METADATA));
   }
 
   @Override
@@ -51,7 +53,8 @@ public DatabricksResultSet listSchemas(
         new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);
     LOGGER.debug("SQL command to fetch schemas: {}", SQL);
-    return MetadataResultSetBuilder.getSchemasResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getSchemasResult(
+        getResultSet(SQL, session, StatementType.METADATA), catalog);
   }
 
   @Override
@@ -62,12 +65,17 @@ public DatabricksResultSet listTables(
       String tableNamePattern,
       String[] tableTypes)
       throws SQLException {
+    tableTypes =
+        Optional.ofNullable(tableTypes)
+            .filter(types -> types.length > 0)
+            .orElse(DEFAULT_TABLE_TYPES);
     CommandBuilder commandBuilder =
         new CommandBuilder(catalog, session)
             .setSchemaPattern(schemaNamePattern)
             .setTablePattern(tableNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_TABLES);
-    return MetadataResultSetBuilder.getTablesResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getTablesResult(
+        getResultSet(SQL, session, StatementType.METADATA), tableTypes);
   }
 
   @Override
@@ -90,7 +98,8 @@ public DatabricksResultSet listColumns(
         .setTablePattern(tableNamePattern)
         .setColumnPattern(columnNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_COLUMNS);
-    return MetadataResultSetBuilder.getColumnsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getColumnsResult(
+        getResultSet(SQL, session, StatementType.QUERY));
   }
 
   @Override
@@ -106,7 +115,8 @@ public DatabricksResultSet listFunctions(
         .setFunctionPattern(functionNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_FUNCTIONS);
     LOGGER.debug("SQL command to fetch functions: {}", SQL);
-    return MetadataResultSetBuilder.getFunctionsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getFunctionsResult(
+        getResultSet(SQL, session, StatementType.METADATA));
  }
 
   @Override
@@ -116,15 +126,17 @@ public DatabricksResultSet listPrimaryKeys(
         new CommandBuilder(catalog, session).setSchema(schema).setTable(table);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_PRIMARY_KEYS);
     LOGGER.debug("SQL command to fetch primary keys: {}", SQL);
-    return MetadataResultSetBuilder.getPrimaryKeysResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getPrimaryKeysResult(
+        getResultSet(SQL, session, StatementType.METADATA));
   }
 
-  private ResultSet getResultSet(String SQL, IDatabricksSession session) throws SQLException {
+  private ResultSet getResultSet(
+      String SQL, IDatabricksSession session, StatementType statementType) throws SQLException {
     return sdkClient.executeStatement(
         SQL,
         session.getComputeResource(),
         new HashMap<Integer, ImmutableSqlParameter>(),
-        StatementType.METADATA,
+        statementType,
         session,
         null /* parentStatement */);
   }
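With the Optional.ofNullable defaulting above, a standard JDBC metadata call that passes null (or an empty array) for table types is now filtered against DEFAULT_TABLE_TYPES. A hedged usage sketch (the JDBC URL, catalog, and schema are placeholders):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class ListTablesSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder URL; a real connection needs host, httpPath, and credentials.
    try (Connection conn =
        DriverManager.getConnection("jdbc:databricks://<host>:443;httpPath=<path>")) {
      DatabaseMetaData meta = conn.getMetaData();
      // null types now behaves like {"TABLE", "VIEW", "SYSTEM TABLE"}.
      try (ResultSet rs = meta.getTables("main", "default", "%", null)) {
        while (rs.next()) {
          System.out.println(
              rs.getString("TABLE_NAME") + " (" + rs.getString("TABLE_TYPE") + ")");
        }
      }
    }
  }
}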
