Commit f2d1806

Make things compile
somehow
1 parent e4b9e4b commit f2d1806

File tree

7 files changed: +76 -26 lines changed

x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default.

x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default.

x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default.

x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default.

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java

Lines changed: 1 addition & 1 deletion

@@ -38,7 +38,7 @@ public abstract class BlockHash implements Releasable, SeenGroupIds {
 
     protected final BlockFactory blockFactory;
 
-    BlockHash(BlockFactory blockFactory) {
+    protected BlockHash(BlockFactory blockFactory) {
        this.blockFactory = blockFactory;
    }
 
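
For context on why this widening is needed: the new Categorize block hashes added later in this commit live outside org.elasticsearch.compute.aggregation.blockhash, so a package-private constructor would not compile for them. A minimal sketch of what the protected constructor permits (class name hypothetical; kept abstract so the remaining BlockHash methods can stay unimplemented):

import org.elasticsearch.compute.aggregation.blockhash.BlockHash;
import org.elasticsearch.compute.data.BlockFactory;

// Hypothetical subclass living in another package, mirroring AbstractCategorizeBlockHash below.
abstract class ExternalBlockHashSketch extends BlockHash {
    ExternalBlockHashSketch(BlockFactory blockFactory) {
        super(blockFactory); // legal now that the BlockHash constructor is protected
    }
    // add(...), getKeys(), nonEmpty(), seenGroupIds(...) and close() still need real
    // implementations, as the NOCOMMIT/TODO stubs in Categorize.java show.
}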

x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st

Lines changed: 3 additions & 3 deletions

@@ -63,7 +63,7 @@ $endif$
 /**
  * Maps a {@link $Type$Block} column to group ids.
  */
-final class $Type$BlockHash extends BlockHash {
+public final class $Type$BlockHash extends BlockHash {
     private final int channel;
     final $Hash$ hash;
 
@@ -105,7 +105,7 @@ final class $Type$BlockHash extends BlockHash {
         }
     }
 
-    IntVector add($Type$Vector vector) {
+    public IntVector add($Type$Vector vector) {
 $if(BytesRef)$
         BytesRef scratch = new BytesRef();
 $endif$
@@ -125,7 +125,7 @@ $endif$
         }
     }
 
-    IntBlock add($Type$Block block) {
+    public IntBlock add($Type$Block block) {
 $if(BytesRef)$
         var ordinals = block.asOrdinals();
         if (ordinals != null) {
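
Making the generated $Type$BlockHash classes and their add(...) methods public serves the same goal: code outside the blockhash package, such as CategorizedIntermediateBlockHash below, can call IntBlockHash.add(IntBlock) directly. A hedged sketch of that call shape (helper name hypothetical; it assumes an already-constructed IntBlockHash, just as the intermediate hash below receives one through its constructor):

import org.elasticsearch.compute.aggregation.blockhash.IntBlockHash;
import org.elasticsearch.compute.data.IntBlock;

final class IntBlockHashUsageSketch {
    // Feeds category ids through the now-public IntBlockHash.add(IntBlock),
    // which returns the group ids that downstream aggregators consume.
    static IntBlock groupIdsFor(IntBlockHash hash, IntBlock categoryIds) {
        return hash.add(categoryIds);
    }
}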

x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java

Lines changed: 60 additions & 10 deletions

@@ -21,14 +21,13 @@
 import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
 import org.elasticsearch.compute.aggregation.blockhash.BlockHash;
-import org.elasticsearch.compute.aggregation.blockhash.LongBlockHash;
+import org.elasticsearch.compute.aggregation.blockhash.IntBlockHash;
 import org.elasticsearch.compute.ann.Evaluator;
 import org.elasticsearch.compute.ann.Fixed;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.CompositeBlock;
-import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
@@ -181,13 +180,24 @@ private abstract class AbstractCategorizeBlockHash extends BlockHash {
         private final boolean outputPartial;
         protected final TokenListCategorizer.CloseableTokenListCategorizer categorizer;
 
+        AbstractCategorizeBlockHash(
+            BlockFactory blockFactory,
+            boolean outputPartial,
+            TokenListCategorizer.CloseableTokenListCategorizer categorizer
+        ) {
+            super(blockFactory);
+            this.outputPartial = outputPartial;
+            this.categorizer = categorizer;
+        }
+
         @Override
         public Block[] getKeys() {
-            if (outputPartial) {
+            if (outputPartial) {
                 // NOCOMMIT load partial
                 Block state = null;
-                Block keys ; // NOCOMMIT do we even need to send the keys? it's just going to be 0 to the length of state
-                return new Block[] {new CompositeBlock()};
+                Block keys; // NOCOMMIT do we even need to send the keys? it's just going to be 0 to the length of state
+                // return new Block[] {new CompositeBlock()};
+                return null;
             }
 
             // NOCOMMIT load final
@@ -219,17 +229,56 @@ private Block buildIntermediateBlock(BlockFactory blockFactory, int positionCoun
     private class CategorizeRawBlockHash extends AbstractCategorizeBlockHash {
         private final CategorizeEvaluator evaluator;
 
+        CategorizeRawBlockHash(
+            BlockFactory blockFactory,
+            boolean outputPartial,
+            TokenListCategorizer.CloseableTokenListCategorizer categorizer,
+            CategorizeEvaluator evaluator
+        ) {
+            super(blockFactory, outputPartial, categorizer);
+            this.evaluator = evaluator;
+        }
+
         @Override
         public void add(Page page, GroupingAggregatorFunction.AddInput addInput) {
-            addInput.add(0, evaluator.eval(page));
+            IntBlock result = (IntBlock) evaluator.eval(page);
+            addInput.add(0, result);
         }
 
+        @Override
+        public IntVector nonEmpty() {
+            // TODO
+            return null;
+        }
+
+        @Override
+        public BitArray seenGroupIds(BigArrays bigArrays) {
+            // TODO
+            return null;
+        }
+
+        @Override
+        public void close() {
+            // TODO
+        }
     }
 
     private class CategorizedIntermediateBlockHash extends AbstractCategorizeBlockHash {
-        private final LongBlockHash hash;
+        private final IntBlockHash hash;
         private final int channel;
 
+        CategorizedIntermediateBlockHash(
+            BlockFactory blockFactory,
+            boolean outputPartial,
+            TokenListCategorizer.CloseableTokenListCategorizer categorizer,
+            IntBlockHash hash,
+            int channel
+        ) {
+            super(blockFactory, outputPartial, categorizer);
+            this.hash = hash;
+            this.channel = channel;
+        }
+
         public void add(Page page, GroupingAggregatorFunction.AddInput addInput) {
             CompositeBlock block = page.getBlock(channel);
             BytesRefBlock groupingState = block.getBlock(0);
@@ -240,11 +289,12 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) {
             } else {
                 idMap = Collections.emptyMap();
             }
-            try (IntBlock.Builder newIds = blockFactory.newIntBlockBuilder(groups.getTotalValueCount())) {
+            try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(groups.getTotalValueCount())) {
                 for (int i = 0; i < groups.getTotalValueCount(); i++) {
-                    newIds.appendInt(idMap.get(i));
+                    newIdsBuilder.appendInt(idMap.get(i));
                 }
-                addInput.add(page, hash.add(newIds.build()));
+                IntBlock newIds = newIdsBuilder.build();
+                addInput.add(0, hash.add(newIds));
             }
         }
 
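
As far as these hunks show, the intermediate path in CategorizedIntermediateBlockHash boils down to: read the serialized categorizer state from the composite block, build an idMap from local category ids to the merged categorizer's ids, rewrite every incoming id through that map, and push the result through the now-public IntBlockHash so downstream aggregators see consistent group ids. A standalone sketch of just the remap step (class and method names hypothetical; it reuses the same newIntBlockBuilder/appendInt/build calls as the diff above and, like the diff, assumes every position has a mapping in idMap):

import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;

import java.util.Map;

final class CategoryIdRemapSketch {
    // Rewrites local category ids 0..positionCount-1 into the merged categorizer's ids.
    static IntBlock remap(BlockFactory blockFactory, Map<Integer, Integer> idMap, int positionCount) {
        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(positionCount)) {
            for (int i = 0; i < positionCount; i++) {
                builder.appendInt(idMap.get(i));
            }
            return builder.build();
        }
    }
}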
