Skip to content

Commit 6ef1117

Browse files
authored
[PECO-2011] Add method to determine the disposition from result set metadata (#543)
* getIsCloudFetchUsed method added to DatabricksResultSetMetaData.
1 parent aafefb0 commit 6ef1117

File tree

3 files changed

+85
-13
lines changed

3 files changed

+85
-13
lines changed

src/main/java/com/databricks/jdbc/api/impl/DatabricksResultSetMetaData.java

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,14 +10,12 @@
1010
import com.databricks.jdbc.common.util.WrapperUtil;
1111
import com.databricks.jdbc.log.JdbcLogger;
1212
import com.databricks.jdbc.log.JdbcLoggerFactory;
13-
import com.databricks.jdbc.model.client.thrift.generated.TColumnDesc;
14-
import com.databricks.jdbc.model.client.thrift.generated.TGetResultSetMetadataResp;
15-
import com.databricks.jdbc.model.client.thrift.generated.TTypeEntry;
16-
import com.databricks.jdbc.model.client.thrift.generated.TTypeQualifierValue;
13+
import com.databricks.jdbc.model.client.thrift.generated.*;
1714
import com.databricks.jdbc.model.core.ColumnMetadata;
1815
import com.databricks.jdbc.model.core.ResultManifest;
1916
import com.databricks.sdk.service.sql.ColumnInfo;
2017
import com.databricks.sdk.service.sql.ColumnInfoTypeName;
18+
import com.databricks.sdk.service.sql.Format;
2119
import com.google.common.collect.ImmutableList;
2220
import com.google.common.collect.ImmutableMap;
2321
import java.sql.ResultSetMetaData;
@@ -36,6 +34,7 @@ public class DatabricksResultSetMetaData implements ResultSetMetaData {
3634
private final ImmutableMap<String, Integer> columnNameIndex;
3735
private final long totalRows;
3836
private Long chunkCount;
37+
private final boolean isCloudFetchUsed;
3938

4039
/**
4140
* Constructs a {@code DatabricksResultSetMetaData} object for a SEA result set.
@@ -90,6 +89,7 @@ public DatabricksResultSetMetaData(String statementId, ResultManifest resultMani
9089
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
9190
this.totalRows = resultManifest.getTotalRowCount();
9291
this.chunkCount = resultManifest.getTotalChunkCount();
92+
this.isCloudFetchUsed = getIsCloudFetchFromManifest(resultManifest);
9393
}
9494

9595
/**
@@ -151,6 +151,7 @@ public DatabricksResultSetMetaData(
151151
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
152152
this.totalRows = rows;
153153
this.chunkCount = chunkCount;
154+
this.isCloudFetchUsed = getIsCloudFetchFromManifest(resultManifest);
154155
}
155156

156157
/**
@@ -191,6 +192,7 @@ public DatabricksResultSetMetaData(
191192
this.columns = columnsBuilder.build();
192193
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
193194
this.totalRows = totalRows;
195+
this.isCloudFetchUsed = false;
194196
}
195197

196198
/**
@@ -235,6 +237,7 @@ public DatabricksResultSetMetaData(
235237
this.columns = columnsBuilder.build();
236238
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
237239
this.totalRows = totalRows;
240+
this.isCloudFetchUsed = false;
238241
}
239242

240243
@Override
@@ -376,6 +379,18 @@ public long getTotalRows() {
376379
return totalRows;
377380
}
378381

382+
public boolean getIsCloudFetchUsed() {
383+
return isCloudFetchUsed;
384+
}
385+
386+
private boolean getIsCloudFetchFromManifest(ResultManifest resultManifest) {
387+
return resultManifest.getFormat() == Format.ARROW_STREAM;
388+
}
389+
390+
private boolean getIsCloudFetchFromManifest(TGetResultSetMetadataResp resultManifest) {
391+
return resultManifest.getResultFormat() == TSparkRowSetType.URL_BASED_SET;
392+
}
393+
379394
public Long getChunkCount() {
380395
return chunkCount;
381396
}

src/test/java/com/databricks/client/jdbc/DriverTest.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import com.databricks.jdbc.api.IDatabricksConnection;
44
import com.databricks.jdbc.api.IDatabricksUCVolumeClient;
5+
import com.databricks.jdbc.api.impl.DatabricksResultSetMetaData;
56
import com.databricks.jdbc.api.impl.arrow.ArrowResultChunk;
67
import com.databricks.jdbc.common.DatabricksJdbcConstants;
78
import java.io.File;
@@ -94,6 +95,23 @@ void testGetTablesOSS_Metadata() throws Exception {
9495
con.close();
9596
}
9697

98+
@Test
99+
void testGetDisposition() throws Exception {
100+
DriverManager.registerDriver(new Driver());
101+
DriverManager.drivers().forEach(driver -> System.out.println(driver.getClass()));
102+
String jdbcUrl =
103+
"jdbc:databricks://e2-dogfood.staging.cloud.databricks.com:443/default;transportMode=https;ssl=1;AuthMech=3;httpPath=/sql/1.0/warehouses/791ba2a31c7fd70a;";
104+
Connection con =
105+
DriverManager.getConnection(jdbcUrl, "token", "xx"); // Default connection, arrow enabled.
106+
System.out.println("Connection established with default params. Arrow is enabled ......");
107+
String query = "SELECT * FROM RANGE(10)";
108+
ResultSet resultSet = con.createStatement().executeQuery(query);
109+
DatabricksResultSetMetaData rsmd = (DatabricksResultSetMetaData) resultSet.getMetaData();
110+
System.out.println("isCloudFetchUsed when arrow is enabled: " + rsmd.getIsCloudFetchUsed());
111+
resultSet.close();
112+
con.close();
113+
}
114+
97115
@Test
98116
void testArclight() throws Exception {
99117
DriverManager.registerDriver(new Driver());

src/test/java/com/databricks/jdbc/api/impl/DatabricksResultSetMetaDataTest.java

Lines changed: 48 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,37 @@
11
package com.databricks.jdbc.api.impl;
22

33
import static com.databricks.jdbc.common.util.DatabricksThriftUtil.getTypeFromTypeDesc;
4-
import static org.junit.jupiter.api.Assertions.assertEquals;
5-
import static org.junit.jupiter.api.Assertions.assertFalse;
4+
import static org.junit.jupiter.api.Assertions.*;
65

76
import com.databricks.jdbc.common.DatabricksJdbcConstants;
87
import com.databricks.jdbc.common.util.DatabricksTypeUtil;
98
import com.databricks.jdbc.model.client.thrift.generated.*;
109
import com.databricks.jdbc.model.core.ResultManifest;
11-
import com.databricks.sdk.service.sql.ColumnInfo;
12-
import com.databricks.sdk.service.sql.ColumnInfoTypeName;
13-
import com.databricks.sdk.service.sql.ResultSchema;
10+
import com.databricks.sdk.service.sql.*;
1411
import java.sql.ResultSetMetaData;
1512
import java.sql.SQLException;
16-
import java.util.Collections;
17-
import java.util.HashMap;
18-
import java.util.List;
19-
import java.util.Map;
13+
import java.util.*;
14+
import java.util.stream.Stream;
2015
import org.junit.jupiter.api.Assertions;
2116
import org.junit.jupiter.api.Test;
17+
import org.junit.jupiter.params.ParameterizedTest;
18+
import org.junit.jupiter.params.provider.MethodSource;
2219

2320
public class DatabricksResultSetMetaDataTest {
2421
private static final String STATEMENT_ID = "statementId";
2522

23+
static Stream<TSparkRowSetType> thriftResultFormats() {
24+
return Stream.of(
25+
TSparkRowSetType.ARROW_BASED_SET,
26+
TSparkRowSetType.COLUMN_BASED_SET,
27+
TSparkRowSetType.ROW_BASED_SET,
28+
TSparkRowSetType.URL_BASED_SET);
29+
}
30+
31+
static Stream<Format> sdkResultFormats() {
32+
return Stream.of(Format.ARROW_STREAM, Format.CSV, Format.JSON_ARRAY);
33+
}
34+
2635
public ColumnInfo getColumn(String name, ColumnInfoTypeName typeName, String typeText) {
2736
ColumnInfo columnInfo = new ColumnInfo();
2837
columnInfo.setName(name);
@@ -225,6 +234,36 @@ public void testGetScaleAndPrecisionWithTColumnDesc() {
225234
assertEquals(0, scaleAndPrecision[1]);
226235
}
227236

237+
@ParameterizedTest
238+
@MethodSource("thriftResultFormats")
239+
public void testGetDispositionThrift(TSparkRowSetType resultFormat) {
240+
TGetResultSetMetadataResp thriftResultManifest = getThriftResultManifest();
241+
thriftResultManifest.setResultFormat(resultFormat);
242+
DatabricksResultSetMetaData metaData =
243+
new DatabricksResultSetMetaData(STATEMENT_ID, thriftResultManifest, 1, 1);
244+
245+
if (resultFormat == TSparkRowSetType.URL_BASED_SET) {
246+
assertTrue(metaData.getIsCloudFetchUsed());
247+
} else {
248+
assertFalse(metaData.getIsCloudFetchUsed());
249+
}
250+
}
251+
252+
@ParameterizedTest
253+
@MethodSource("sdkResultFormats")
254+
public void testDispositionSdk(Format format) {
255+
ResultManifest resultManifest = getResultManifest();
256+
resultManifest.setFormat(format);
257+
DatabricksResultSetMetaData metaData =
258+
new DatabricksResultSetMetaData(STATEMENT_ID, resultManifest);
259+
260+
if (format == Format.ARROW_STREAM) {
261+
assertTrue(metaData.getIsCloudFetchUsed());
262+
} else {
263+
assertFalse(metaData.getIsCloudFetchUsed());
264+
}
265+
}
266+
228267
private void verifyDefaultMetadataProperties(DatabricksResultSetMetaData metaData)
229268
throws SQLException {
230269
for (int i = 1; i <= metaData.getColumnCount(); i++) {

0 commit comments

Comments (0)