Skip to content

Commit 1aa675f

Browse files
Add multi-chunk execution test (#181)
* Add multi-chunk execution test
* Store the large chunk body as separate binary file
* Improve the assertions in multi-chunk test case
1 parent eea66f1 commit 1aa675f

File tree

16 files changed

+472
-13
lines changed

16 files changed

+472
-13
lines changed

src/main/java/com/databricks/jdbc/core/DatabricksResultSet.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,9 @@ public DatabricksResultSet(
100100
this.executionResult =
101101
ExecutionResultFactory.getResultSet(resultData, resultManifest, statementId, session);
102102
int rowSize = getRowCount(resultData);
103-
this.resultSetMetaData = new DatabricksResultSetMetaData(statementId, resultManifest, rowSize);
103+
this.resultSetMetaData =
104+
new DatabricksResultSetMetaData(
105+
statementId, resultManifest, rowSize, resultData.getResultLinksSize());
104106
this.statementType = statementType;
105107
this.updateCount = null;
106108
this.parentStatement = parentStatement;

src/main/java/com/databricks/jdbc/core/DatabricksResultSetMetaData.java

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ public class DatabricksResultSetMetaData implements ResultSetMetaData {
3030
private final ImmutableList<ImmutableDatabricksColumn> columns;
3131
private final ImmutableMap<String, Integer> columnNameIndex;
3232
private final long totalRows;
33+
private Long chunkCount;
3334
private static final String DEFAULT_CATALOGUE_NAME = "Spark";
3435
private static final String NULL_STRING = "null";
3536

@@ -80,10 +81,11 @@ public DatabricksResultSetMetaData(
8081
this.columns = columnsBuilder.build();
8182
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
8283
this.totalRows = resultManifest.getTotalRowCount();
84+
this.chunkCount = resultManifest.getTotalChunkCount();
8385
}
8486

8587
public DatabricksResultSetMetaData(
86-
String statementId, TGetResultSetMetadataResp resultManifest, int rows) {
88+
String statementId, TGetResultSetMetadataResp resultManifest, int rows, long chunkCount) {
8789
this.statementId = statementId;
8890
Map<String, Integer> columnNameToIndexMap = new HashMap<>();
8991
ImmutableList.Builder<ImmutableDatabricksColumn> columnsBuilder = ImmutableList.builder();
@@ -108,6 +110,7 @@ public DatabricksResultSetMetaData(
108110
this.columns = columnsBuilder.build();
109111
this.columnNameIndex = ImmutableMap.copyOf(columnNameToIndexMap);
110112
this.totalRows = rows;
113+
this.chunkCount = chunkCount;
111114
}
112115

113116
public DatabricksResultSetMetaData(
@@ -283,6 +286,10 @@ public long getTotalRows() {
283286
return totalRows;
284287
}
285288

289+
public Long getChunkCount() {
290+
return chunkCount;
291+
}
292+
286293
private ImmutableDatabricksColumn.Builder getColumnBuilder() {
287294
return ImmutableDatabricksColumn.builder()
288295
.isAutoIncrement(false)

src/test/java/com/databricks/jdbc/client/impl/thrift/commons/DatabricksThriftAccessorTest.java

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import com.databricks.sdk.core.DatabricksConfig;
1919
import com.databricks.sdk.service.sql.StatementState;
2020
import java.sql.SQLException;
21+
import java.util.ArrayList;
2122
import java.util.Collections;
2223
import org.apache.thrift.TException;
2324
import org.apache.thrift.protocol.TProtocol;
@@ -46,11 +47,16 @@ public class DatabricksThriftAccessorTest {
4647
.setMaxBytes(DEFAULT_BYTE_LIMIT);
4748
private static final TGetResultSetMetadataReq resultSetMetadataReq =
4849
new TGetResultSetMetadataReq().setOperationHandle(tOperationHandle);
50+
51+
private static final TRowSet rowSet = new TRowSet().setResultLinks(new ArrayList<>(2));
52+
4953
private static final TFetchResultsResp response =
5054
new TFetchResultsResp()
5155
.setStatus(new TStatus().setStatusCode(TStatusCode.SUCCESS_STATUS))
5256
.setResultSetMetadata(
53-
new TGetResultSetMetadataResp().setResultFormat(TSparkRowSetType.COLUMN_BASED_SET));
57+
new TGetResultSetMetadataResp().setResultFormat(TSparkRowSetType.COLUMN_BASED_SET))
58+
.setResults(rowSet);
59+
5460
private static final TSparkDirectResults directResults =
5561
new TSparkDirectResults()
5662
.setResultSet(response)

src/test/java/com/databricks/jdbc/core/DatabricksResultSetMetaDataTest.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,7 @@ public void testColumnsForVolumeOperation() throws SQLException {
101101
@Test
102102
public void testThriftColumns() throws SQLException {
103103
DatabricksResultSetMetaData metaData =
104-
new DatabricksResultSetMetaData(STATEMENT_ID, getThriftResultManifest(), 10);
104+
new DatabricksResultSetMetaData(STATEMENT_ID, getThriftResultManifest(), 10, 1);
105105
Assertions.assertEquals(10, metaData.getTotalRows());
106106
Assertions.assertEquals(1, metaData.getColumnCount());
107107
Assertions.assertEquals("testCol", metaData.getColumnName(1));
@@ -111,7 +111,7 @@ public void testThriftColumns() throws SQLException {
111111
public void testEmptyAndNullThriftColumns() throws SQLException {
112112
TGetResultSetMetadataResp resultSetMetadataResp = new TGetResultSetMetadataResp();
113113
DatabricksResultSetMetaData metaData =
114-
new DatabricksResultSetMetaData(STATEMENT_ID, resultSetMetadataResp, 0);
114+
new DatabricksResultSetMetaData(STATEMENT_ID, resultSetMetadataResp, 0, 1);
115115
Assertions.assertEquals(0, metaData.getColumnCount());
116116

117117
resultSetMetadataResp.setSchema(new TTableSchema());

src/test/java/com/databricks/jdbc/integration/fakeservice/FakeServiceExtension.java

Lines changed: 59 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,30 @@
22

33
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.FAKE_SERVICE_URI_PROP_SUFFIX;
44
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.IS_FAKE_SERVICE_TEST_PROP;
5+
import static com.github.tomakehurst.wiremock.common.AbstractFileSource.byFileExtension;
56

67
import com.databricks.jdbc.client.http.DatabricksHttpClient;
78
import com.databricks.jdbc.driver.DatabricksJdbcConstants.FakeServiceType;
89
import com.databricks.jdbc.integration.IntegrationTestUtil;
10+
import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
11+
import com.github.tomakehurst.wiremock.common.BinaryFile;
12+
import com.github.tomakehurst.wiremock.common.ContentTypes;
913
import com.github.tomakehurst.wiremock.common.SingleRootFileSource;
14+
import com.github.tomakehurst.wiremock.common.TextFile;
15+
import com.github.tomakehurst.wiremock.http.ContentTypeHeader;
16+
import com.github.tomakehurst.wiremock.http.HttpHeaders;
1017
import com.github.tomakehurst.wiremock.junit5.WireMockRuntimeInfo;
1118
import com.github.tomakehurst.wiremock.recording.RecordSpecBuilder;
1219
import com.github.tomakehurst.wiremock.standalone.JsonFileMappingsSource;
1320
import com.github.tomakehurst.wiremock.stubbing.StubMapping;
21+
import com.github.tomakehurst.wiremock.stubbing.StubMappingCollection;
1422
import java.io.IOException;
1523
import java.nio.file.Files;
1624
import java.nio.file.Path;
1725
import java.nio.file.Paths;
1826
import java.util.List;
27+
import java.util.Optional;
28+
import java.util.stream.Collectors;
1929
import java.util.stream.Stream;
2030
import org.jetbrains.annotations.NotNull;
2131
import org.junit.jupiter.api.extension.ExtensionContext;
@@ -67,6 +77,9 @@ public class FakeServiceExtension extends DatabricksWireMockExtension {
6777
*/
6878
private static final long MAX_STUBBING_BINARY_SIZE = 102400;
6979

80+
/** Root directory for extracted body files. */
81+
private static final String EXTRACTED_BODY_FILE_ROOT = "src/test/resources/__files";
82+
7083
/**
7184
* Environment variable holding the fake service mode.
7285
*
@@ -195,9 +208,24 @@ private String getStubbingDir(ExtensionContext context) {
195208
}
196209

197210
/** Loads stub mappings from the stubbing directory. */
198-
private void loadStubMappings(WireMockRuntimeInfo wireMockRuntimeInfo, ExtensionContext context) {
199-
String stubbingDir = getStubbingDir(context);
200-
wireMockRuntimeInfo.getWireMock().loadMappingsFrom(stubbingDir);
211+
private void loadStubMappings(WireMockRuntimeInfo wireMockRuntimeInfo, ExtensionContext context)
212+
throws IOException {
213+
final String stubbingDir = getStubbingDir(context);
214+
final SingleRootFileSource fileSource = new SingleRootFileSource(stubbingDir + "/mappings");
215+
216+
final List<TextFile> mappingFiles =
217+
fileSource.listFilesRecursively().stream()
218+
.filter(byFileExtension("json"))
219+
.collect(Collectors.toList());
220+
221+
for (TextFile mappingFile : mappingFiles) {
222+
final StubMappingCollection stubCollection =
223+
JsonUtils.read(mappingFile.readContents(), StubMappingCollection.class);
224+
for (StubMapping mapping : stubCollection.getMappingOrMappings()) {
225+
embedExtractedBodyFile(mapping);
226+
wireMockRuntimeInfo.getWireMock().register(mapping);
227+
}
228+
}
201229
}
202230

203231
/** Starts recording stub mappings. */
@@ -241,6 +269,34 @@ private void clearFakeServiceProperties() {
241269
System.clearProperty(targetBaseUrl + FAKE_SERVICE_URI_PROP_SUFFIX);
242270
}
243271

272+
/** Embeds the extracted body file content into the stub mapping. */
273+
private static void embedExtractedBodyFile(final StubMapping mapping) {
274+
final SingleRootFileSource fileSource = new SingleRootFileSource(EXTRACTED_BODY_FILE_ROOT);
275+
final String bodyFileName = mapping.getResponse().getBodyFileName();
276+
277+
if (bodyFileName != null) {
278+
final ResponseDefinitionBuilder responseDefinitionBuilder =
279+
ResponseDefinitionBuilder.like(mapping.getResponse()).withBodyFile(null);
280+
if (ContentTypes.determineIsTextFromMimeType(getMimeType(mapping))) {
281+
final TextFile bodyFile = fileSource.getTextFileNamed(bodyFileName);
282+
responseDefinitionBuilder.withBody(bodyFile.readContentsAsString());
283+
} else {
284+
BinaryFile bodyFile = fileSource.getBinaryFileNamed(bodyFileName);
285+
responseDefinitionBuilder.withBody(bodyFile.readContents());
286+
}
287+
288+
mapping.setResponse(responseDefinitionBuilder.build());
289+
}
290+
}
291+
292+
/** Gets the MIME type of the response body. */
293+
private static String getMimeType(StubMapping mapping) {
294+
return Optional.ofNullable(mapping.getResponse().getHeaders())
295+
.map(HttpHeaders::getContentTypeHeader)
296+
.map(ContentTypeHeader::mimeTypePart)
297+
.orElse(null);
298+
}
299+
244300
/** Deletes files in the given directory. */
245301
private static void deleteFilesInDir(String dirPath) throws IOException {
246302
Path dir = Paths.get(dirPath);
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
package com.databricks.jdbc.integration.fakeservice;
2+
3+
import com.fasterxml.jackson.annotation.JsonInclude;
4+
import com.fasterxml.jackson.core.*;
5+
import com.fasterxml.jackson.databind.DeserializationFeature;
6+
import com.fasterxml.jackson.databind.ObjectMapper;
7+
import com.fasterxml.jackson.databind.ObjectWriter;
8+
import com.fasterxml.jackson.databind.SerializationFeature;
9+
import com.fasterxml.jackson.databind.cfg.JsonNodeFeature;
10+
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
11+
import java.io.IOException;
12+
13+
/** Utility class for JSON operations. */
14+
public class JsonUtils {
15+
16+
/** {@link ObjectMapper} instance holder. */
17+
private static final InheritableThreadLocal<ObjectMapper> objectMapperHolder =
18+
new InheritableThreadLocal<>() {
19+
@Override
20+
protected ObjectMapper initialValue() {
21+
ObjectMapper objectMapper =
22+
new ObjectMapper(
23+
new JsonFactoryBuilder()
24+
.streamReadConstraints(
25+
StreamReadConstraints.builder()
26+
.maxStringLength(Math.toIntExact(30000000))
27+
.build())
28+
.build());
29+
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
30+
objectMapper.configure(JsonNodeFeature.STRIP_TRAILING_BIGDECIMAL_ZEROES, false);
31+
objectMapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
32+
objectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
33+
objectMapper.configure(JsonParser.Feature.IGNORE_UNDEFINED, true);
34+
objectMapper.configure(JsonGenerator.Feature.WRITE_BIGDECIMAL_AS_PLAIN, true);
35+
objectMapper.configure(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS, true);
36+
objectMapper.registerModule(new JavaTimeModule());
37+
objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
38+
objectMapper.enable(JsonParser.Feature.INCLUDE_SOURCE_IN_LOCATION);
39+
return objectMapper;
40+
}
41+
};
42+
43+
/** Read the given JSON string and return the object of the given class. */
44+
public static <T> T read(String json, Class<T> clazz) throws JsonProcessingException {
45+
return objectMapperHolder.get().readValue(json, clazz);
46+
}
47+
48+
/** Read the given byte array and return the object of the given class. */
49+
public static <T> T read(byte[] stream, Class<T> clazz) throws IOException {
50+
return objectMapperHolder.get().readValue(stream, clazz);
51+
}
52+
53+
/** Write the given object to a JSON string. */
54+
public static <T> String write(T object, Class<?> view) throws JsonProcessingException {
55+
ObjectWriter objectWriter = objectMapperHolder.get().writerWithDefaultPrettyPrinter();
56+
if (view != null) {
57+
objectWriter = objectWriter.withView(view);
58+
}
59+
return objectWriter.writeValueAsString(object);
60+
}
61+
}

src/test/java/com/databricks/jdbc/integration/fakeservice/StubMappingCredentialsCleaner.java

Lines changed: 23 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,9 @@
22

33
import static com.databricks.jdbc.client.impl.sdk.PathConstants.STATEMENT_PATH;
44

5+
import com.fasterxml.jackson.core.JsonProcessingException;
56
import com.github.tomakehurst.wiremock.common.FileSource;
7+
import com.github.tomakehurst.wiremock.common.Json;
68
import com.github.tomakehurst.wiremock.extension.Parameters;
79
import com.github.tomakehurst.wiremock.extension.StubMappingTransformer;
810
import com.github.tomakehurst.wiremock.stubbing.StubMapping;
@@ -34,13 +36,17 @@ public StubMapping transform(StubMapping stubMapping, FileSource files, Paramete
3436
String serverHeaderValue =
3537
stubMapping.getResponse().getHeaders().getHeader(SERVER_HEADER_NAME).firstValue();
3638

37-
if (STATEMENT_PATH.equals(requestUrl)
38-
|| AMAZON_S3_SERVER_VALUE.equals(serverHeaderValue)
39+
if (AMAZON_S3_SERVER_VALUE.equals(serverHeaderValue)
40+
|| requestUrl.startsWith(STATEMENT_PATH)
3941
|| serverHeaderValue.startsWith(AZURE_STORAGE_SERVER_VALUE)) {
4042
// Clean credentials from statement requests (embedded S3 links) and Amazon S3 responses.
41-
String jsonString = StubMapping.buildJsonStringFor(stubMapping);
42-
String transformedJsonString = removeSensitiveCredentials(jsonString);
43-
return StubMapping.buildFrom(transformedJsonString);
43+
try {
44+
final String jsonString = getJsonStringFromStubMapping(stubMapping);
45+
final String transformedJsonString = removeSensitiveCredentials(jsonString);
46+
return getStubMappingFromJsonString(transformedJsonString);
47+
} catch (JsonProcessingException e) {
48+
throw new RuntimeException(e);
49+
}
4450
} else {
4551
return stubMapping;
4652
}
@@ -64,4 +70,16 @@ private String removeSensitiveCredentials(String jsonString) {
6470

6571
return buffer.toString();
6672
}
73+
74+
/** Returns the JSON string representation of the given {@link StubMapping}. */
75+
private String getJsonStringFromStubMapping(StubMapping stubMapping)
76+
throws JsonProcessingException {
77+
return JsonUtils.write(stubMapping, Json.PublicView.class);
78+
}
79+
80+
/** Returns the {@link StubMapping} from the given JSON string. */
81+
private StubMapping getStubMappingFromJsonString(String jsonString)
82+
throws JsonProcessingException {
83+
return JsonUtils.read(jsonString, StubMapping.class);
84+
}
6785
}

0 commit comments

Comments (0)