
Commit 6207751

Author: Rafał Hibner

Cleanup incremental

1 parent dfd9f9c, commit 6207751

File tree: 1 file changed (+7, -7)

cpp/src/arrow/compute/kernels/aggregate_test.cc

Lines changed: 7 additions & 7 deletions
@@ -4222,18 +4222,18 @@ class TestRandomQuantileKernel : public TestPrimitiveQuantileKernel<ArrowType> {
       const std::shared_ptr<ChunkedArray>& chunked, std::vector<double>& quantiles) {
     Datum out;
     TDigestQuantileOptions options(quantiles);
-    std::shared_ptr<Array> incremental_centroids;
+    std::shared_ptr<Scalar> incremental_centroids;
     for (const auto& chunk : chunked->chunks()) {
       ASSERT_OK_AND_ASSIGN(Datum centroids, TDigestMap(chunk));
-      ASSERT_OK_AND_ASSIGN(auto map_chunk, MakeArrayFromScalar(*centroids.scalar(), 1));
       if (incremental_centroids) {
-        auto map_chunked =
-            std::make_shared<ChunkedArray>(ArrayVector{incremental_centroids, map_chunk});
+        // Is there a nicer way to make array from scalars?
+        ASSERT_OK_AND_ASSIGN(auto chunk1, MakeArrayFromScalar(*centroids.scalar(), 1));
+        ASSERT_OK_AND_ASSIGN(auto chunk2, MakeArrayFromScalar(*incremental_centroids, 1));
+        auto map_chunked = std::make_shared<ChunkedArray>(ArrayVector{chunk1, chunk2});
         ASSERT_OK_AND_ASSIGN(Datum reduced, TDigestReduce(map_chunked));
-        ASSERT_OK_AND_ASSIGN(incremental_centroids,
-                             MakeArrayFromScalar(*reduced.scalar(), 1));
+        incremental_centroids = reduced.scalar();
       } else {
-        incremental_centroids = map_chunk;
+        incremental_centroids = centroids.scalar();
       }
 
       ASSERT_OK_AND_ASSIGN(
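Note on the change, for readers skimming the diff: the accumulator switches from an array to a scalar, so length-1 arrays are only materialized at the reduce step instead of on every chunk. Below is a minimal standalone sketch of the resulting pattern, not the test code itself. MakeArrayFromScalar, ChunkedArray, ArrayVector, and Datum are standard Arrow C++ APIs; TDigestMap and TDigestReduce are branch-specific helpers exercised by this test, and the forward declarations here are assumptions about their signatures.

#include <memory>

#include "arrow/api.h"     // MakeArrayFromScalar, ChunkedArray, Datum, Scalar
#include "arrow/result.h"  // ARROW_ASSIGN_OR_RAISE

// Assumed signatures for the branch-specific helpers used in the test.
arrow::Result<arrow::Datum> TDigestMap(const std::shared_ptr<arrow::Array>& chunk);
arrow::Result<arrow::Datum> TDigestReduce(
    const std::shared_ptr<arrow::ChunkedArray>& maps);

// Fold per-chunk centroid maps into a single scalar, merging as we go.
arrow::Result<std::shared_ptr<arrow::Scalar>> IncrementalCentroids(
    const std::shared_ptr<arrow::ChunkedArray>& chunked) {
  std::shared_ptr<arrow::Scalar> acc;  // running centroid map (one scalar)
  for (const auto& chunk : chunked->chunks()) {
    ARROW_ASSIGN_OR_RAISE(arrow::Datum centroids, TDigestMap(chunk));
    if (acc == nullptr) {
      acc = centroids.scalar();  // first chunk: keep its map as-is
      continue;
    }
    // Wrap both scalars in length-1 arrays so they can be reduced together.
    ARROW_ASSIGN_OR_RAISE(auto a, arrow::MakeArrayFromScalar(*centroids.scalar(), 1));
    ARROW_ASSIGN_OR_RAISE(auto b, arrow::MakeArrayFromScalar(*acc, 1));
    auto pair = std::make_shared<arrow::ChunkedArray>(arrow::ArrayVector{a, b});
    ARROW_ASSIGN_OR_RAISE(arrow::Datum reduced, TDigestReduce(pair));
    acc = reduced.scalar();  // carry the merged map forward
  }
  return acc;
}

In the test itself the same accumulation runs inside ASSERT_OK_AND_ASSIGN, so failures surface as GTest assertions rather than returned Statuses.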
