Commit 7e6c1dd

added extra test that checks that the optimization kicks in
1 parent c62dc26 commit 7e6c1dd

Lines changed: 173 additions & 0 deletions
@@ -0,0 +1,173 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.script.LongFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;

public class RuntimeFieldTests extends ESSingleNodeTestCase {

    public void testRuntimeSourceOnlyField_sourceProviderOptimization() throws IOException {
        var mapping = jsonBuilder().startObject().startObject("runtime").startObject("field");
        mapping.field("type", "long");
        mapping.endObject().endObject().endObject();
        var indexService = createIndex("test-index", Settings.builder().put("index.mapping.source.mode", "synthetic").build(), mapping);

        int numDocs = 256;
        try (Directory directory = newDirectory(); IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig())) {
            for (int i = 0; i < numDocs; i++) {
                BytesArray source = new BytesArray(String.format(Locale.ROOT, "{\"field\":%d,\"another_field\":123}", i));
                var doc = indexService.mapperService()
                    .documentMapper()
                    .parse(new SourceToParse(Integer.toString(i), source, XContentType.JSON))
                    .rootDoc();
                iw.addDocument(doc);
            }
            iw.commit();
            iw.forceMerge(1);

            try (var indexReader = DirectoryReader.open(iw)) {
                var searcher = new IndexSearcher(indexReader);
                LeafReaderContext leafReaderContext = indexReader.leaves().getFirst();
                var context = indexService.newSearchExecutionContext(0, 0, searcher, () -> 1L, null, Map.of());
                var fieldType = (AbstractScriptFieldType<?>) indexService.mapperService().fieldType("field");

                // The another_field entry should have been filtered out; otherwise the mechanism that pushes the field name as a
                // source filter to the SourceProvider isn't kicking in. Essentially this checks that the optimization in
                // ConcurrentSegmentSourceProvider.optimizedSourceProvider(...) kicks in:
                var leafFactory = (LongFieldScript.LeafFactory) fieldType.leafFactory(context);
                var fieldScript = leafFactory.newInstance(leafReaderContext);
                for (int i = 0; i < numDocs; i++) {
                    fieldScript.runForDoc(i);
                    var source = fieldScript.source().get().source();
                    assertThat(source, equalTo(Map.of("field", i)));
                }

                // Test that the runtime-based term query works as expected with the optimization:
                var termQuery = fieldType.termQuery(32, context);
                assertThat(searcher.count(termQuery), equalTo(1));

                // Test that the runtime-based block loader works as expected with the optimization:
                var blockLoader = fieldType.blockLoader(blContext(context.lookup()));
                var columnReader = blockLoader.columnAtATimeReader(leafReaderContext);
                var block = (TestBlock) columnReader.read(TestBlock.factory(), TestBlock.docs(leafReaderContext), 0, false);
                for (int i = 0; i < block.size(); i++) {
                    assertThat(block.get(i), equalTo((long) i));
                }
            }
        }
    }

    public void testRuntimeSourceOnlyField_noSourceProviderOptimization() throws IOException {
        var mapping = jsonBuilder().startObject().startObject("runtime").startObject("field");
        mapping.field("type", "long");
        mapping.endObject().endObject().endObject();
        var indexService = createIndex("test-index", Settings.EMPTY, mapping);

        int numDocs = 256;
        try (Directory directory = newDirectory(); IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig())) {
            for (int i = 0; i < numDocs; i++) {
                BytesArray source = new BytesArray(String.format(Locale.ROOT, "{\"field\":%d,\"another_field\":123}", i));
                var doc = indexService.mapperService()
                    .documentMapper()
                    .parse(new SourceToParse(Integer.toString(i), source, XContentType.JSON))
                    .rootDoc();
                iw.addDocument(doc);
            }
            iw.commit();
            iw.forceMerge(1);

            try (var indexReader = DirectoryReader.open(iw)) {
                var searcher = new IndexSearcher(indexReader);
                LeafReaderContext leafReaderContext = indexReader.leaves().getFirst();
                var context = indexService.newSearchExecutionContext(0, 0, searcher, () -> 1L, null, Map.of());
                var fieldType = (AbstractScriptFieldType<?>) indexService.mapperService().fieldType("field");

                // Here the optimization doesn't kick in, so the script sees the complete source, including the another_field entry:
                var leafFactory = (LongFieldScript.LeafFactory) fieldType.leafFactory(context);
                var fieldScript = leafFactory.newInstance(leafReaderContext);
                for (int i = 0; i < numDocs; i++) {
                    fieldScript.runForDoc(i);
                    var source = fieldScript.source().get().source();
                    assertThat(source, equalTo(Map.of("field", i, "another_field", 123)));
                }

                // Test that the runtime-based term query also works as expected without the optimization:
                var termQuery = fieldType.termQuery(32, context);
                assertThat(searcher.count(termQuery), equalTo(1));

                // Test that the runtime-based block loader also works as expected without the optimization:
                var blockLoader = fieldType.blockLoader(blContext(context.lookup()));
                var columnReader = blockLoader.columnAtATimeReader(leafReaderContext);
                var block = (TestBlock) columnReader.read(TestBlock.factory(), TestBlock.docs(leafReaderContext), 0, false);
                for (int i = 0; i < block.size(); i++) {
                    assertThat(block.get(i), equalTo((long) i));
                }
            }
        }
    }

    static MappedFieldType.BlockLoaderContext blContext(SearchLookup lookup) {
        return new MappedFieldType.BlockLoaderContext() {
            @Override
            public String indexName() {
                throw new UnsupportedOperationException();
            }

            @Override
            public IndexSettings indexSettings() {
                throw new UnsupportedOperationException();
            }

            @Override
            public MappedFieldType.FieldExtractPreference fieldExtractPreference() {
                return MappedFieldType.FieldExtractPreference.NONE;
            }

            @Override
            public SearchLookup lookup() {
                return lookup;
            }

            @Override
            public Set<String> sourcePaths(String name) {
                throw new UnsupportedOperationException();
            }

            @Override
            public String parentField(String field) {
                throw new UnsupportedOperationException();
            }

            @Override
            public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
                return FieldNamesFieldMapper.FieldNamesFieldType.get(true);
            }
        };
    }
}
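
For context on the behaviour the two tests distinguish, here is a minimal, self-contained sketch: with the optimization the runtime script only ever sees a source map filtered down to the field it reads, while without it the script sees the complete source. The names below (SourceFilterSketch, filterSource) are hypothetical illustrations only and are not part of Elasticsearch's SourceProvider API.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

class SourceFilterSketch {

    // Keeps only the entries of the source map whose keys are in requiredFields,
    // mimicking what pushing the runtime field name down as a source filter achieves.
    static Map<String, Object> filterSource(Map<String, Object> source, Set<String> requiredFields) {
        Map<String, Object> filtered = new LinkedHashMap<>();
        for (Map.Entry<String, Object> entry : source.entrySet()) {
            if (requiredFields.contains(entry.getKey())) {
                filtered.put(entry.getKey(), entry.getValue());
            }
        }
        return filtered;
    }

    public static void main(String[] args) {
        // Mirrors the documents indexed by the tests: {"field": <i>, "another_field": 123}
        Map<String, Object> fullSource = Map.of("field", 42, "another_field", 123);

        // With the optimization the script effectively sees {field=42} ...
        System.out.println(filterSource(fullSource, Set.of("field")));

        // ... while without it (the second test) it sees the full map.
        System.out.println(fullSource);
    }
}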
