
Commit f8fda4a

Merge branch 'main' into abortstatement

2 parents: fefda54 + 1aa675f

219 files changed: +5502 −207 lines


.github/workflows/prCheck.yml

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ jobs:
           restore-keys: ${{ runner.os }}-m2

       - name: Check Unit Tests
-        run: mvn test
+        run: mvn test -Dtest=!**/integration/**,!**/local/**

       - name: Install xmllint
         if: runner.os == 'Linux'

pom.xml

Lines changed: 1 addition & 1 deletion

@@ -204,7 +204,7 @@
           <version>${maven-surefire-plugin.version}</version>
           <configuration>
             <excludes>
-              <exclude>**/**IntegrationTests.java</exclude>
+              <exclude>**/integration/**/*.java</exclude>
               <exclude>**/*MetadataBenchmarkingTest.java</exclude>
             </excludes>
             <argLine>
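
Note: the workflow and Surefire changes together switch test exclusion from class-name matching (**/**IntegrationTests.java) to package-path matching, so a test is skipped by plain `mvn test` simply because it lives under an integration/ (or, in CI, local/) package. A hypothetical example; the file name and class are illustrative, not from this commit:

// src/test/java/com/databricks/jdbc/integration/ConnectionIntegrationTest.java (hypothetical)
package com.databricks.jdbc.integration;

// Excluded from plain `mvn test` purely by its package path -- no class-name
// convention or annotation is needed under the new rules.
public class ConnectionIntegrationTest {
  // integration tests that need a live workspace would go here
}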

src/main/java/com/databricks/jdbc/client/impl/helper/MetadataResultConstants.java

Lines changed: 11 additions & 2 deletions

@@ -5,8 +5,12 @@
 import java.util.List;

 public class MetadataResultConstants {
+  public static final String[] DEFAULT_TABLE_TYPES = {"TABLE", "VIEW", "SYSTEM TABLE"};
   private static final ResultColumn CATALOG_COLUMN =
       new ResultColumn("TABLE_CAT", "catalogName", Types.VARCHAR);
+
+  private static final ResultColumn CATALOG_COLUMN_FOR_GET_CATALOGS =
+      new ResultColumn("TABLE_CAT", "catalog", Types.VARCHAR);
   private static final ResultColumn TYPE_CATALOG_COLUMN =
       new ResultColumn("TYPE_CAT", "TYPE_CATALOG_COLUMN", Types.VARCHAR);
   private static final ResultColumn TYPE_SCHEMA_COLUMN =
@@ -25,6 +29,10 @@ public class MetadataResultConstants {
       new ResultColumn("TYPE_NAME", "TYPE_NAME", Types.VARCHAR);
   private static final ResultColumn SCHEMA_COLUMN =
       new ResultColumn("TABLE_SCHEM", "namespace", Types.VARCHAR);
+
+  private static final ResultColumn SCHEMA_COLUMN_FOR_GET_SCHEMA =
+      new ResultColumn("TABLE_SCHEM", "databaseName", Types.VARCHAR);
+
   private static final ResultColumn TABLE_NAME_COLUMN =
       new ResultColumn("TABLE_NAME", "tableName", Types.VARCHAR);
   private static final ResultColumn TABLE_TYPE_COLUMN =
@@ -132,8 +140,9 @@ public class MetadataResultConstants {
           ORDINAL_POSITION_COLUMN,
           IS_AUTO_INCREMENT_COLUMN,
           IS_GENERATED_COLUMN);
-  public static List<ResultColumn> CATALOG_COLUMNS = List.of(CATALOG_COLUMN);
-  public static List<ResultColumn> SCHEMA_COLUMNS = List.of(SCHEMA_COLUMN, CATALOG_COLUMN);
+  public static List<ResultColumn> CATALOG_COLUMNS = List.of(CATALOG_COLUMN_FOR_GET_CATALOGS);
+  public static List<ResultColumn> SCHEMA_COLUMNS =
+      List.of(SCHEMA_COLUMN_FOR_GET_SCHEMA, CATALOG_COLUMN);
   public static List<ResultColumn> TABLE_COLUMNS =
       List.of(
           CATALOG_COLUMN,
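
Note: the new *_FOR_GET_CATALOGS / *_FOR_GET_SCHEMA constants exist because the same standard JDBC column is backed by differently named server-side result columns depending on the command. A minimal sketch of the mapping, using a hypothetical stand-in for ResultColumn that mirrors the constructor calls above:

import java.sql.Types;

public class ColumnMappingSketch {
  // Hypothetical stand-in mirroring the ResultColumn constructor used above:
  // (JDBC column name, server result-set column name, java.sql.Types code).
  record ResultColumn(String columnName, String resultSetColumnName, int columnType) {}

  public static void main(String[] args) {
    // getCatalogs(): the server result carries the value under "catalog" ...
    ResultColumn catalogCol = new ResultColumn("TABLE_CAT", "catalog", Types.VARCHAR);
    // ... while getSchemas() reads it from "databaseName"; both are surfaced to
    // JDBC callers under the standard names TABLE_CAT / TABLE_SCHEM.
    ResultColumn schemaCol = new ResultColumn("TABLE_SCHEM", "databaseName", Types.VARCHAR);
    System.out.println(catalogCol + "\n" + schemaCol);
  }
}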

src/main/java/com/databricks/jdbc/client/impl/helper/MetadataResultSetBuilder.java

Lines changed: 38 additions & 4 deletions

@@ -30,13 +30,19 @@ public static DatabricksResultSet getCatalogsResult(ResultSet resultSet) throws
     return buildResultSet(CATALOG_COLUMNS, rows, GET_CATALOGS_STATEMENT_ID);
   }

-  public static DatabricksResultSet getSchemasResult(ResultSet resultSet) throws SQLException {
-    List<List<Object>> rows = getRows(resultSet, SCHEMA_COLUMNS);
+  public static DatabricksResultSet getSchemasResult(ResultSet resultSet, String catalog)
+      throws SQLException {
+    List<List<Object>> rows = getRowsForSchemas(resultSet, SCHEMA_COLUMNS, catalog);
     return buildResultSet(SCHEMA_COLUMNS, rows, METADATA_STATEMENT_ID);
   }

-  public static DatabricksResultSet getTablesResult(ResultSet resultSet) throws SQLException {
-    List<List<Object>> rows = getRows(resultSet, TABLE_COLUMNS);
+  public static DatabricksResultSet getTablesResult(ResultSet resultSet, String[] tableTypes)
+      throws SQLException {
+    List<String> allowedTableTypes = List.of(tableTypes);
+    List<List<Object>> rows =
+        getRows(resultSet, TABLE_COLUMNS).stream()
+            .filter(row -> allowedTableTypes.contains(row.get(3))) // Filtering based on table type
+            .collect(Collectors.toList());
     return buildResultSet(TABLE_COLUMNS, rows, GET_TABLES_STATEMENT_ID);
   }

@@ -80,6 +86,34 @@ private static List<List<Object>> getRows(ResultSet resultSet, List<ResultColumn
     return rows;
   }

+  private static List<List<Object>> getRowsForSchemas(
+      ResultSet resultSet, List<ResultColumn> columns, String catalog) throws SQLException {
+    // TODO(PECO-1677): Remove this method once the server side ResultSet metadata contains catalogs
+    List<List<Object>> rows = new ArrayList<>();
+    while (resultSet.next()) {
+      List<Object> row = new ArrayList<>();
+      for (ResultColumn column : columns) {
+        if (column.getColumnName().equals("TABLE_CAT")) {
+          row.add(catalog);
+          continue;
+        }
+        Object object;
+        try {
+          object = resultSet.getObject(column.getResultSetColumnName());
+          if (object == null) {
+            object = NULL_STRING;
+          }
+        } catch (DatabricksSQLException e) {
+          // Remove non-relevant columns from the obtained result set
+          object = NULL_STRING;
+        }
+        row.add(object);
+      }
+      rows.add(row);
+    }
+    return rows;
+  }
+
   private static DatabricksResultSet buildResultSet(
       List<ResultColumn> columns, List<List<Object>> rows, String statementId) {
     return new DatabricksResultSet(
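
Note: getTablesResult now drops rows whose table type is not in the allowed list; the inline comment in the diff indicates that index 3 of each row holds the TABLE_TYPE value. A self-contained sketch of just that filtering step (the row data is illustrative):

import java.util.List;
import java.util.stream.Collectors;

public class TableTypeFilterSketch {
  public static void main(String[] args) {
    // Rows follow the TABLE_COLUMNS layout; index 3 holds the table type.
    List<List<Object>> rows =
        List.of(
            List.<Object>of("main", "default", "orders", "TABLE"),
            List.<Object>of("main", "default", "orders_view", "VIEW"),
            List.<Object>of("main", "default", "ext_table", "EXTERNAL"));
    List<String> allowedTableTypes = List.of("TABLE", "VIEW", "SYSTEM TABLE");
    List<List<Object>> filtered =
        rows.stream()
            .filter(row -> allowedTableTypes.contains(row.get(3)))
            .collect(Collectors.toList());
    System.out.println(filtered); // only the TABLE and VIEW rows survive
  }
}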

src/main/java/com/databricks/jdbc/client/impl/sdk/DatabricksNewMetadataSdkClient.java

Lines changed: 20 additions & 8 deletions

@@ -1,5 +1,6 @@
 package com.databricks.jdbc.client.impl.sdk;

+import static com.databricks.jdbc.client.impl.helper.MetadataResultConstants.DEFAULT_TABLE_TYPES;
 import static com.databricks.jdbc.client.impl.sdk.ResultConstants.TYPE_INFO_RESULT;

 import com.databricks.jdbc.client.DatabricksMetadataClient;
@@ -41,7 +42,8 @@ public DatabricksResultSet listCatalogs(IDatabricksSession session) throws SQLEx
     CommandBuilder commandBuilder = new CommandBuilder(session);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_CATALOGS);
     LOGGER.debug("SQL command to fetch catalogs: {}", SQL);
-    return MetadataResultSetBuilder.getCatalogsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getCatalogsResult(
+        getResultSet(SQL, session, StatementType.METADATA));
   }

   @Override
@@ -51,7 +53,8 @@ public DatabricksResultSet listSchemas(
         new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);
     LOGGER.debug("SQL command to fetch schemas: {}", SQL);
-    return MetadataResultSetBuilder.getSchemasResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getSchemasResult(
+        getResultSet(SQL, session, StatementType.METADATA), catalog);
   }

   @Override
@@ -62,12 +65,17 @@ public DatabricksResultSet listTables(
       String tableNamePattern,
       String[] tableTypes)
       throws SQLException {
+    tableTypes =
+        Optional.ofNullable(tableTypes)
+            .filter(types -> types.length > 0)
+            .orElse(DEFAULT_TABLE_TYPES);
     CommandBuilder commandBuilder =
         new CommandBuilder(catalog, session)
             .setSchemaPattern(schemaNamePattern)
             .setTablePattern(tableNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_TABLES);
-    return MetadataResultSetBuilder.getTablesResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getTablesResult(
+        getResultSet(SQL, session, StatementType.METADATA), tableTypes);
   }

   @Override
@@ -90,7 +98,8 @@ public DatabricksResultSet listColumns(
             .setTablePattern(tableNamePattern)
             .setColumnPattern(columnNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_COLUMNS);
-    return MetadataResultSetBuilder.getColumnsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getColumnsResult(
+        getResultSet(SQL, session, StatementType.QUERY));
   }

   @Override
@@ -106,7 +115,8 @@ public DatabricksResultSet listFunctions(
             .setFunctionPattern(functionNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_FUNCTIONS);
     LOGGER.debug("SQL command to fetch functions: {}", SQL);
-    return MetadataResultSetBuilder.getFunctionsResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getFunctionsResult(
+        getResultSet(SQL, session, StatementType.METADATA));
   }

   @Override
@@ -116,15 +126,17 @@ public DatabricksResultSet listPrimaryKeys(
         new CommandBuilder(catalog, session).setSchema(schema).setTable(table);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_PRIMARY_KEYS);
     LOGGER.debug("SQL command to fetch primary keys: {}", SQL);
-    return MetadataResultSetBuilder.getPrimaryKeysResult(getResultSet(SQL, session));
+    return MetadataResultSetBuilder.getPrimaryKeysResult(
+        getResultSet(SQL, session, StatementType.METADATA));
   }

-  private ResultSet getResultSet(String SQL, IDatabricksSession session) throws SQLException {
+  private ResultSet getResultSet(
+      String SQL, IDatabricksSession session, StatementType statementType) throws SQLException {
     return sdkClient.executeStatement(
         SQL,
         session.getComputeResource(),
         new HashMap<Integer, ImmutableSqlParameter>(),
-        StatementType.METADATA,
+        statementType,
         session,
         null /* parentStatement */);
   }
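
Note: the Optional chain in listTables means a null or empty tableTypes argument falls back to DEFAULT_TABLE_TYPES before any filtering happens. A standalone check of that defaulting behavior, with the logic copied from the diff:

import java.util.Arrays;
import java.util.Optional;

public class DefaultTableTypesSketch {
  static final String[] DEFAULT_TABLE_TYPES = {"TABLE", "VIEW", "SYSTEM TABLE"};

  static String[] normalize(String[] tableTypes) {
    // Same defaulting logic as listTables: null or empty -> DEFAULT_TABLE_TYPES.
    return Optional.ofNullable(tableTypes)
        .filter(types -> types.length > 0)
        .orElse(DEFAULT_TABLE_TYPES);
  }

  public static void main(String[] args) {
    System.out.println(Arrays.toString(normalize(null)));                  // defaults
    System.out.println(Arrays.toString(normalize(new String[0])));         // defaults
    System.out.println(Arrays.toString(normalize(new String[] {"VIEW"}))); // [VIEW]
  }
}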

src/main/java/com/databricks/jdbc/core/DatabricksResultSet.java

Lines changed: 3 additions & 1 deletion

@@ -100,7 +100,9 @@ public DatabricksResultSet(
     this.executionResult =
         ExecutionResultFactory.getResultSet(resultData, resultManifest, statementId, session);
     int rowSize = getRowCount(resultData);
-    this.resultSetMetaData = new DatabricksResultSetMetaData(statementId, resultManifest, rowSize);
+    this.resultSetMetaData =
+        new DatabricksResultSetMetaData(
+            statementId, resultManifest, rowSize, resultData.getResultLinksSize());
     this.statementType = statementType;
     this.updateCount = null;
     this.parentStatement = parentStatement;

src/main/java/com/databricks/jdbc/core/DatabricksResultSetMetaData.java

Lines changed: 8 additions & 1 deletion

@@ -30,6 +30,7 @@ public class DatabricksResultSetMetaData implements ResultSetMetaData {
   private final ImmutableList<ImmutableDatabricksColumn> columns;
   private final ImmutableMap<String, Integer> columnNameIndex;
   private final long totalRows;
+  private Long chunkCount;
   private static final String DEFAULT_CATALOGUE_NAME = "Spark";
   private static final String NULL_STRING = "null";

@@ -80,10 +81,11 @@ public DatabricksResultSetMetaData(
     this.columns = columnsBuilder.build();
     this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
     this.totalRows = resultManifest.getTotalRowCount();
+    this.chunkCount = resultManifest.getTotalChunkCount();
   }

   public DatabricksResultSetMetaData(
-      String statementId, TGetResultSetMetadataResp resultManifest, int rows) {
+      String statementId, TGetResultSetMetadataResp resultManifest, int rows, long chunkCount) {
     this.statementId = statementId;
     Map<String, Integer> columnNameToIndexMap = new HashMap<>();
     ImmutableList.Builder<ImmutableDatabricksColumn> columnsBuilder = ImmutableList.builder();
@@ -108,6 +110,7 @@ public DatabricksResultSetMetaData(
     this.columns = columnsBuilder.build();
     this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
     this.totalRows = rows;
+    this.chunkCount = chunkCount;
   }

   public DatabricksResultSetMetaData(
@@ -283,6 +286,10 @@ public long getTotalRows() {
     return totalRows;
   }

+  public Long getChunkCount() {
+    return chunkCount;
+  }
+
   private ImmutableDatabricksColumn.Builder getColumnBuilder() {
     return ImmutableDatabricksColumn.builder()
         .isAutoIncrement(false)
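
Note: chunk count now flows from the SEA manifest (getTotalChunkCount) or, on the Thrift path, from resultData.getResultLinksSize() (see the DatabricksResultSet change above), and is exposed via getChunkCount(). A sketch of how a caller might read it; getChunkCount() is not part of java.sql.ResultSetMetaData, so the cast below is an assumption about caller-side usage, not an API shown in this commit:

import com.databricks.jdbc.core.DatabricksResultSetMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

public class ChunkCountSketch {
  // Assumes `rs` came from this driver; returns null for any other ResultSet.
  static Long chunkCountOf(ResultSet rs) throws SQLException {
    ResultSetMetaData md = rs.getMetaData();
    if (md instanceof DatabricksResultSetMetaData) {
      return ((DatabricksResultSetMetaData) md).getChunkCount();
    }
    return null;
  }
}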

src/test/java/com/databricks/jdbc/client/impl/DatabricksNewMetadataSdkClientTest.java

Lines changed: 7 additions & 7 deletions

@@ -202,8 +202,11 @@ void testListTables(
         .thenReturn(mockedResultSet);
     when(mockedResultSet.next()).thenReturn(true, false);
     for (ResultColumn resultColumn : TABLE_COLUMNS) {
-      when(mockedResultSet.getObject(resultColumn.getResultSetColumnName()))
-          .thenReturn(TEST_COLUMN);
+      if (resultColumn == TABLE_COLUMNS.get(3)) {
+        when(mockedResultSet.getObject(resultColumn.getResultSetColumnName())).thenReturn("TABLE");
+      } else
+        when(mockedResultSet.getObject(resultColumn.getResultSetColumnName()))
+            .thenReturn(TEST_COLUMN);
     }
     DatabricksResultSet actualResult =
         metadataClient.listTables(session, catalog, schema, table, null);
@@ -231,7 +234,7 @@ void testListColumns(
             sqlStatement,
             mockedComputeResource,
             new HashMap<Integer, ImmutableSqlParameter>(),
-            StatementType.METADATA,
+            StatementType.QUERY,
             session,
             null))
         .thenReturn(mockedResultSet);
@@ -264,10 +267,7 @@ void testListSchemas(String sqlStatement, String schema, String description) thr
             null))
         .thenReturn(mockedResultSet);
     when(mockedResultSet.next()).thenReturn(true, false);
-    for (ResultColumn resultColumn : SCHEMA_COLUMNS) {
-      when(mockedResultSet.getObject(resultColumn.getResultSetColumnName()))
-          .thenReturn(TEST_COLUMN);
-    }
+    when(mockedResultSet.getObject("databaseName")).thenReturn(TEST_COLUMN);
     DatabricksResultSet actualResult = metadataClient.listSchemas(session, TEST_CATALOG, schema);
     assertEquals(
         actualResult.getStatementStatus().getState(), StatementState.SUCCEEDED, description);

src/test/java/com/databricks/jdbc/client/impl/thrift/commons/DatabricksThriftAccessorTest.java

Lines changed: 7 additions & 1 deletion

@@ -18,6 +18,7 @@
 import com.databricks.sdk.core.DatabricksConfig;
 import com.databricks.sdk.service.sql.StatementState;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Collections;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TProtocol;
@@ -46,11 +47,16 @@ public class DatabricksThriftAccessorTest {
           .setMaxBytes(DEFAULT_BYTE_LIMIT);
   private static final TGetResultSetMetadataReq resultSetMetadataReq =
       new TGetResultSetMetadataReq().setOperationHandle(tOperationHandle);
+
+  private static final TRowSet rowSet = new TRowSet().setResultLinks(new ArrayList<>(2));
+
   private static final TFetchResultsResp response =
       new TFetchResultsResp()
           .setStatus(new TStatus().setStatusCode(TStatusCode.SUCCESS_STATUS))
           .setResultSetMetadata(
-              new TGetResultSetMetadataResp().setResultFormat(TSparkRowSetType.COLUMN_BASED_SET));
+              new TGetResultSetMetadataResp().setResultFormat(TSparkRowSetType.COLUMN_BASED_SET))
+          .setResults(rowSet);
+
   private static final TSparkDirectResults directResults =
       new TSparkDirectResults()
           .setResultSet(response)

src/test/java/com/databricks/jdbc/core/DatabricksResultSetMetaDataTest.java

Lines changed: 2 additions & 2 deletions

@@ -101,7 +101,7 @@ public void testColumnsForVolumeOperation() throws SQLException {
   @Test
   public void testThriftColumns() throws SQLException {
     DatabricksResultSetMetaData metaData =
-        new DatabricksResultSetMetaData(STATEMENT_ID, getThriftResultManifest(), 10);
+        new DatabricksResultSetMetaData(STATEMENT_ID, getThriftResultManifest(), 10, 1);
     Assertions.assertEquals(10, metaData.getTotalRows());
     Assertions.assertEquals(1, metaData.getColumnCount());
     Assertions.assertEquals("testCol", metaData.getColumnName(1));
@@ -111,7 +111,7 @@ public void testThriftColumns() throws SQLException {
   public void testEmptyAndNullThriftColumns() throws SQLException {
     TGetResultSetMetadataResp resultSetMetadataResp = new TGetResultSetMetadataResp();
     DatabricksResultSetMetaData metaData =
-        new DatabricksResultSetMetaData(STATEMENT_ID, resultSetMetadataResp, 0);
+        new DatabricksResultSetMetaData(STATEMENT_ID, resultSetMetadataResp, 0, 1);
     Assertions.assertEquals(0, metaData.getColumnCount());

     resultSetMetadataResp.setSchema(new TTableSchema());
