Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.salesforce.datacloud.jdbc.auth.AuthenticationSettings;
import com.salesforce.datacloud.jdbc.auth.DataCloudTokenProcessor;
import com.salesforce.datacloud.jdbc.auth.TokenProcessor;
import com.salesforce.datacloud.jdbc.core.partial.ChunkBased;
import com.salesforce.datacloud.jdbc.core.partial.RowBased;
import com.salesforce.datacloud.jdbc.exception.DataCloudJDBCException;
import com.salesforce.datacloud.jdbc.http.ClientBuilder;
Expand Down Expand Up @@ -195,6 +196,18 @@ public DataCloudResultSet getRowBasedResultSet(String queryId, long offset, long
return StreamingResultSet.of(queryId, executor, iterator);
}

/**
 * Returns a streaming result set over the half-open chunk range
 * {@code [chunkId, chunkId + limit)} of an asynchronously executed query.
 *
 * @param queryId the id of the previously submitted query
 * @param chunkId the first chunk id to read
 * @param limit   the number of consecutive chunks to read
 * @return a {@link DataCloudResultSet} streaming the requested chunks
 */
@Unstable
public DataCloudResultSet getChunkBasedResultSet(String queryId, long chunkId, long limit) {
    log.info("Get chunk-based result set. queryId={}, chunkId={}, limit={}", queryId, chunkId, limit);
    return StreamingResultSet.of(queryId, executor, ChunkBased.of(executor, queryId, chunkId, limit));
}

/**
 * Returns a streaming result set over a single chunk of an asynchronously executed query.
 * Convenience overload that delegates with a chunk limit of 1.
 *
 * @param queryId the id of the previously submitted query
 * @param chunkId the chunk id to read
 * @return a {@link DataCloudResultSet} streaming exactly one chunk
 */
@Unstable
public DataCloudResultSet getChunkBasedResultSet(String queryId, long chunkId) {
return getChunkBasedResultSet(queryId, chunkId, 1);
}

/**
* Use this to determine when a given query is complete by filtering the responses and a subsequent findFirst()
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
/*
* Copyright (c) 2024, Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.salesforce.datacloud.jdbc.core.partial;

import com.salesforce.datacloud.jdbc.core.HyperGrpcClientExecutor;
import com.salesforce.datacloud.jdbc.util.Unstable;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicLong;
import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import salesforce.cdp.hyperdb.v1.QueryResult;

/**
 * Iterates over the {@link QueryResult}s of a half-open chunk range
 * {@code [chunkId, chunkId + limit)} for a given query, fetching one chunk at a
 * time from the server and draining each chunk's results before advancing.
 *
 * <p>Not thread-safe beyond the atomic chunk counter; intended for single-consumer use.
 */
@Unstable
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public class ChunkBased implements Iterator<QueryResult> {
    /**
     * Creates an iterator over the chunk range {@code [chunkId, chunkId + limit)}.
     *
     * @param client  executor used to fetch chunk data
     * @param queryId the id of the query whose chunks are read
     * @param chunkId the first chunk id to fetch
     * @param limit   the number of consecutive chunks to fetch
     */
    public static ChunkBased of(
            @NonNull HyperGrpcClientExecutor client, @NonNull String queryId, long chunkId, long limit) {
        return new ChunkBased(client, queryId, new AtomicLong(chunkId), chunkId + limit);
    }

    @NonNull private final HyperGrpcClientExecutor client;

    @NonNull private final String queryId;

    // Next chunk id to fetch; incremented on every fetch.
    private final AtomicLong chunkId;

    // Exclusive upper bound of the chunk range (initial chunkId + limit).
    private final long limitId;

    // Results of the currently loaded chunk; null until the first fetch.
    private Iterator<QueryResult> iterator;

    @Override
    public boolean hasNext() {
        if (iterator == null) {
            // First fetch uses 'false' for the trailing flag, later fetches 'true' —
            // presumably toggling schema omission; confirm against HyperGrpcClientExecutor.
            iterator = client.getQueryResult(queryId, chunkId.getAndIncrement(), false);
        }

        // Advance across chunks until data is found or the range is exhausted.
        // A single-step advance here would return false prematurely if an
        // intermediate chunk were empty while later chunks remained in range.
        while (!iterator.hasNext() && chunkId.get() < limitId) {
            iterator = client.getQueryResult(queryId, chunkId.getAndIncrement(), true);
        }

        return iterator.hasNext();
    }

    @Override
    public QueryResult next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }

        return iterator.next();
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/*
* Copyright (c) 2024, Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.salesforce.datacloud.jdbc.core.partial;

import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;

import com.salesforce.datacloud.jdbc.core.DataCloudQueryStatus;
import com.salesforce.datacloud.jdbc.core.DataCloudStatement;
import com.salesforce.datacloud.jdbc.exception.DataCloudJDBCException;
import com.salesforce.datacloud.jdbc.hyper.HyperTestBase;
import io.grpc.StatusRuntimeException;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

@Slf4j
class ChunkBasedTest extends HyperTestBase {
// System under test: deliberately routes through the two-arg overload when limit == 1
// and the three-arg overload otherwise, so both public entry points get coverage.
private List<Integer> sut(String queryId, long chunkId, long limit) {
try (val connection = getHyperQueryConnection()) {
val rs = limit == 1
? connection.getChunkBasedResultSet(queryId, chunkId)
: connection.getChunkBasedResultSet(queryId, chunkId, limit);
return RowBasedTest.toStream(rs).collect(Collectors.toList());
}
}

// Fixture sizes: a tiny result that fits in one chunk, and a 10M-row result
// intended to span multiple chunks.
private static final int smallSize = 5;
private static final int largeSize = 1024 * 1024 * 10;
private String small;
private String large;

// Instance-level @BeforeAll: assumes the base class configures
// @TestInstance(Lifecycle.PER_CLASS) — TODO confirm in HyperTestBase.
@BeforeAll
void setupQueries() {
small = getQueryId(smallSize);
large = getQueryId(largeSize);
}

@SneakyThrows
@Test
void canGetSimpleChunk() {
// The 5-row query fits in chunk 0; expect the full series back.
val actual = sut(small, 0, 1);
assertThat(actual).containsExactly(1, 2, 3, 4, 5);
}

@SneakyThrows
@Test
void failsOnChunkOverrun() {
// Requesting two chunks from a single-chunk result must surface the server's
// OUT_OF_RANGE error wrapped in a DataCloudJDBCException.
assertThatThrownBy(() -> sut(small, 0, 2))
.isInstanceOf(DataCloudJDBCException.class)
.hasMessage("Failed to load next batch")
.hasCauseInstanceOf(StatusRuntimeException.class)
.hasRootCauseMessage("OUT_OF_RANGE: The requested chunk id '1' is out of range");
}

@SneakyThrows
@Test
void consecutiveChunksIncludeAllData() {
val status = new AtomicReference<DataCloudQueryStatus>();
val last = new AtomicLong(0);
try (val connection = getHyperQueryConnection()) {
// Poll until the query has finished executing or produced results, capturing
// the most recent status — its chunk count drives the read below.
while (connection
.getQueryStatus(large)
.peek(status::set)
.noneMatch(t -> t.isExecutionFinished() || t.isResultProduced())) {
log.info("waiting for query to finish. queryId={}", large);
}

// Read every chunk; rows are generated in ascending order 1..largeSize, so
// each value must be exactly one greater than the previous one.
val rs = connection.getChunkBasedResultSet(large, 0, status.get().getChunkCount());

while (rs.next()) {
assertThat(rs.getLong(1)).isEqualTo(last.incrementAndGet());
}
}

assertThat(last.get()).isEqualTo(largeSize);
}

// Submits an async query generating `max` ordered rows (with three numeric copies
// of the series to pad row width) and returns its query id for later retrieval.
@SneakyThrows
private String getQueryId(int max) {
val query = String.format(
"select a, cast(a as numeric(38,18)) b, cast(a as numeric(38,18)) c, cast(a as numeric(38,18)) d from generate_series(1, %d) as s(a) order by a asc",
max);

try (val client = getHyperQueryConnection();
val statement = client.createStatement().unwrap(DataCloudStatement.class)) {
statement.executeAsyncQuery(query);
return statement.getQueryId();
}
}
}