diff --git a/docs/changelog/120835.yaml b/docs/changelog/120835.yaml new file mode 100644 index 0000000000000..f0810d29942b1 --- /dev/null +++ b/docs/changelog/120835.yaml @@ -0,0 +1,5 @@ +pr: 120835 +summary: Synthetic source doc values arrays encoding experiment 2 +area: Mapping +type: enhancement +issues: [] diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 68da320923898..bea6cb8eb7a50 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -98,4 +98,7 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") + task.skipTest("indices.create/21_synthetic_source_stored/index param - field ordering", "Synthetic source keep arrays now stores leaf arrays natively") + task.skipTest("indices.create/21_synthetic_source_stored/field param - keep nested array", "Synthetic source keep arrays now stores leaf arrays natively") + task.skipTest("indices.create/21_synthetic_source_stored/field param - keep root array", "Synthetic source keep arrays now stores leaf arrays natively") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index e51d527593d45..c78ac4c493fe5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -1024,7 +1024,7 @@ index param - field ordering: index: test - length: { hits.hits.0._source: 4 } - - match: { hits.hits.0._source: { "a": "2", "b": [ { "bb": 100, "aa": 200 }, { "aa": 300, "bb": 400 } ], "c": [30, 20, 10], "d": [ { "bb": 10, "aa": 20 }, { "aa": 30, "bb": 40 } ] } } + - match: { hits.hits.0._source: { "a": "2", "b": [ { "bb": 100, "aa": 200 }, { "aa": 300, "bb": 400 } ], "c": ["30", "20", "10"], "d": [ { "bb": 10, "aa": 20 }, { "aa": 30, "bb": 40 } ] } } --- diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 5a417c541d716..e1c88e874bb9d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -9,10 +9,12 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; @@ -24,6 +26,8 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import 
org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; @@ -36,6 +40,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -53,6 +58,8 @@ */ public final class DocumentParser { + private static final Logger LOGGER = LogManager.getLogger(DocumentParser.class); + public static final IndexVersion DYNAMICALLY_MAP_DENSE_VECTORS_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; static final NodeFeature FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE = new NodeFeature( "mapper.fix_parsing_subobjects_false_dynamic_false" ); @@ -148,7 +155,7 @@ private void internalParseDocument(MetadataFieldMapper[] metadataFieldsMappers, } executeIndexTimeScripts(context); - + processArrayOffsets(context); for (MetadataFieldMapper metadataMapper : metadataFieldsMappers) { metadataMapper.postParse(context); } @@ -157,6 +164,41 @@ private void internalParseDocument(MetadataFieldMapper[] metadataFieldsMappers, } } + private static void processArrayOffsets(DocumentParserContext context) throws IOException { + var offsets = context.getOffSetsByField(); + for (var entry : offsets.entrySet()) { + var fieldName = entry.getKey(); + var offset = entry.getValue(); + if (offset.valueToOffsets.isEmpty()) { + continue; + } + + if (offset.currentOffset == 1 && offset.inArray == false) { + continue; + } + + int ord = 0; + int[] offsetToOrd = new int[offset.currentOffset]; + for (var offsetEntry : offset.valueToOffsets.entrySet()) { + for (var offsetAndLevel : offsetEntry.getValue()) { + offsetToOrd[offsetAndLevel] = ord; + } + ord++; + } + + // TODO: remove later + LOGGER.info("values=" + offset.valueToOffsets); + LOGGER.info("offsetToOrd=" + Arrays.toString(offsetToOrd)); + + try (var streamOutput = new BytesStreamOutput()) { + // TODO: optimize + // This array makes it possible to retain the original ordering of the leaf array, including duplicate values. + streamOutput.writeVIntArray(offsetToOrd); + context.doc().add(new BinaryDocValuesField(fieldName, streamOutput.bytes().toBytesRef())); + } + } + } + private static void executeIndexTimeScripts(DocumentParserContext context) { List<FieldMapper> indexTimeScriptMappers = context.mappingLookup().indexTimeScriptMappers(); if (indexTimeScriptMappers.isEmpty()) { @@ -687,7 +729,7 @@ private static void parseNonDynamicArray( // Check if we need to record the array source. This only applies to synthetic source. boolean canRemoveSingleLeafElement = false; - if (context.canAddIgnoredField()) { + if (context.canAddIgnoredField() && (mapper != null && mapper.supportsStoringArraysNatively() == false)) { Mapper.SourceKeepMode mode = Mapper.SourceKeepMode.NONE; boolean objectWithFallbackSyntheticSource = false; if (mapper instanceof ObjectMapper objectMapper) { @@ -725,10 +767,13 @@ private static void parseNonDynamicArray( // In synthetic source, if any array element requires storing its source as-is, it takes precedence over // elements from regular source loading that are then skipped from the synthesized array source. // To prevent this, we track that parsing sub-context is within array scope.
- context = context.maybeCloneForArray(mapper); + if (mapper != null && mapper.supportsStoringArraysNatively() == false) { + context = context.maybeCloneForArray(mapper); + } XContentParser parser = context.parser(); XContentParser.Token token; + context.setInArray(true); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { parseObject(context, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { parseArray(context, lastFieldName); } else if (token == XContentParser.Token.VALUE_NULL) { parseNullValue(context, lastFieldName); } else if (token == null) { throwEofOnParseArray(lastFieldName, context); } else { assert token.isValue(); parseValue(context, lastFieldName); } } + context.setInArray(false); postProcessDynamicArrayMapping(context, lastFieldName); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 51e4e9f4c1b5e..c659221871a7d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -33,6 +33,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeMap; /** * Context used when parsing incoming documents. Holds everything that is needed to parse a document as well as @@ -84,6 +85,21 @@ public LuceneDocument doc() { protected void addDoc(LuceneDocument doc) { in.addDoc(doc); } + + @Override + public Map<String, Offsets> getOffSetsByField() { + return in.getOffSetsByField(); + } + + @Override + void recordOffset(String field, String value) { + in.recordOffset(field, value); + } + + @Override + public void setInArray(boolean inArray) { + in.setInArray(inArray); + } } /** @@ -134,6 +150,9 @@ private enum Scope { private final SeqNoFieldMapper.SequenceIDFields seqID; private final Set<String> fieldsAppliedFromTemplates; + private final Map<String, Offsets> offsetsPerField = new HashMap<>(); + private boolean inArray; + /** * Fields that are copied from values of other fields via copy_to. * This per-document state is needed since it is possible @@ -470,6 +489,30 @@ public Set<String> getCopyToFields() { return copyToFields; } + public static class Offsets { + + public int currentOffset; + public boolean inArray; + public final Map<String, List<Integer>> valueToOffsets = new TreeMap<>(); + + } + + public Map<String, Offsets> getOffSetsByField() { + return offsetsPerField; + } + + void recordOffset(String field, String value) { + Offsets arrayOffsets = offsetsPerField.computeIfAbsent(field, k -> new Offsets()); + int nextOffset = arrayOffsets.currentOffset++; + var offsets = arrayOffsets.valueToOffsets.computeIfAbsent(value, s -> new ArrayList<>()); + offsets.add(nextOffset); + arrayOffsets.inArray = inArray; + } + + public void setInArray(boolean inArray) { + this.inArray = inArray; + } + /** * Add a new mapper dynamically created while parsing. *
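The offsets bookkeeping added above can be illustrated in isolation. The sketch below uses invented class and method names (it is not part of this change) and mirrors what DocumentParserContext.recordOffset and DocumentParser.processArrayOffsets do for a single keyword leaf array: collect the positions of each distinct value in a TreeMap, then invert that map into a position-to-ordinal array, which is the payload written as a VInt array into the per-field `.offsets` binary doc values field. The TreeMap keeps distinct values sorted, which in the common case lines up with the ordinal order of the SORTED_SET doc values.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

// Hypothetical, stand-alone sketch of the offset encoding; not part of the patch.
class OffsetEncodingSketch {
    static int[] encode(List<String> leafArrayValues) {
        // value -> positions at which it appears in the original array
        Map<String, List<Integer>> valueToOffsets = new TreeMap<>();
        int position = 0;
        for (String value : leafArrayValues) {
            valueToOffsets.computeIfAbsent(value, v -> new ArrayList<>()).add(position++);
        }
        // assign ordinals in sorted-value order and invert into a position -> ordinal array
        int[] offsetToOrd = new int[position];
        int ord = 0;
        for (List<Integer> positions : valueToOffsets.values()) {
            for (int p : positions) {
                offsetToOrd[p] = ord;
            }
            ord++;
        }
        return offsetToOrd;
    }

    public static void main(String[] args) {
        // ["c", "a", "c", "b"] -> distinct sorted values [a, b, c] get ords [0, 1, 2],
        // so the encoded array is [2, 0, 2, 1]
        System.out.println(Arrays.toString(encode(List.of("c", "a", "c", "b"))));
    }
}
```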
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index bdcf9bf98279f..09f97fe437f09 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -87,6 +87,7 @@ public final class KeywordFieldMapper extends FieldMapper { private static final Logger logger = LogManager.getLogger(KeywordFieldMapper.class); public static final String CONTENT_TYPE = "keyword"; + public static final String OFFSETS_FIELD_NAME_SUFFIX = ".offsets"; public static class Defaults { public static final FieldType FIELD_TYPE; @@ -182,6 +183,7 @@ public static final class Builder extends FieldMapper.DimensionBuilder { private final IndexAnalyzers indexAnalyzers; private final ScriptCompiler scriptCompiler; private final IndexVersion indexCreatedVersion; + private final SourceKeepMode indexSourceKeepMode; public Builder(final String name, final MappingParserContext mappingParserContext) { this( @@ -189,7 +191,8 @@ public Builder(final String name, final MappingParserContext mappingParserContex mappingParserContext.getIndexAnalyzers(), mappingParserContext.scriptCompiler(), IGNORE_ABOVE_SETTING.get(mappingParserContext.getSettings()), - mappingParserContext.getIndexSettings().getIndexVersionCreated() + mappingParserContext.getIndexSettings().getIndexVersionCreated(), + mappingParserContext.getIndexSettings().sourceKeepMode() ); } @@ -198,7 +201,8 @@ public Builder(final String name, final MappingParserContext mappingParserContex IndexAnalyzers indexAnalyzers, ScriptCompiler scriptCompiler, int ignoreAboveDefault, - IndexVersion indexCreatedVersion + IndexVersion indexCreatedVersion, + SourceKeepMode indexSourceKeepMode ) { super(name); this.indexAnalyzers = indexAnalyzers; @@ -233,10 +237,11 @@ public Builder(final String name, final MappingParserContext mappingParserContex throw new IllegalArgumentException("[ignore_above] must be positive, got [" + v + "]"); } }); + this.indexSourceKeepMode = indexSourceKeepMode; } public Builder(String name, IndexVersion indexCreatedVersion) { - this(name, null, ScriptCompiler.NONE, Integer.MAX_VALUE, indexCreatedVersion); + this(name, null, ScriptCompiler.NONE, Integer.MAX_VALUE, indexCreatedVersion, SourceKeepMode.NONE); } public Builder ignoreAbove(int ignoreAbove) { @@ -370,13 +375,36 @@ public KeywordFieldMapper build(MapperBuilderContext context) { } super.hasScript = script.get() != null; super.onScriptError = onScriptError.getValue(); + + var sourceKeepMode = this.sourceKeepMode.orElse(indexSourceKeepMode); + BinaryFieldMapper offsetsFieldMapper; + if (context.isSourceSynthetic() + && sourceKeepMode == SourceKeepMode.ARRAYS + && fieldtype.stored() == false + && copyTo.copyToFields().isEmpty() + && multiFieldsBuilder.hasMultiFields() == false) { + // Skip stored fields: synthetic source is synthesized from the stored field, so there is no point in tracking offsets. + // Skip copy_to: supporting it requires more work, and copy_to is rarely used in metrics and logging use cases. + + // Keep track of value offsets so that arrays can be reconstructed from doc values in the same order as they were specified at index time + // (if the field is stored then there is no point in doing this). + offsetsFieldMapper = new BinaryFieldMapper.Builder( + context.buildFullName(leafName() + OFFSETS_FIELD_NAME_SUFFIX), + context.isSourceSynthetic() + ).docValues(true).build(context); + } else { + offsetsFieldMapper = null; + } + return new KeywordFieldMapper( leafName(), fieldtype, buildFieldType(context, fieldtype), builderParams(this, context), context.isSourceSynthetic(), - this + this, + offsetsFieldMapper, + indexSourceKeepMode ); } } @@ -867,6 +895,8 @@ public boolean hasNormalizer() { private final IndexAnalyzers indexAnalyzers; private final int ignoreAboveDefault; private final int ignoreAbove; + private final BinaryFieldMapper offsetsFieldMapper; + private final SourceKeepMode indexSourceKeepMode; private KeywordFieldMapper( String simpleName, @@ -874,7 +904,9 @@ private KeywordFieldMapper( KeywordFieldType mappedFieldType, BuilderParams builderParams, boolean isSyntheticSource, - Builder builder + Builder builder, + BinaryFieldMapper offsetsFieldMapper, + SourceKeepMode indexSourceKeepMode ) { super(simpleName, mappedFieldType, builderParams); assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; @@ -891,6 +923,8 @@ private KeywordFieldMapper( this.isSyntheticSource = isSyntheticSource; this.ignoreAboveDefault = builder.ignoreAboveDefault; this.ignoreAbove = builder.ignoreAbove.getValue(); + this.offsetsFieldMapper = offsetsFieldMapper; + this.indexSourceKeepMode = indexSourceKeepMode; } @Override @@ -967,6 +1001,9 @@ private void indexValue(DocumentParserContext context, String value) { if (fieldType().hasDocValues() == false && fieldType.omitNorms()) { context.addToFieldNames(fieldType().name()); } + if (offsetsFieldMapper != null) { + context.recordOffset(offsetsFieldMapper.fullPath(), value); + } } private static String normalizeValue(NamedAnalyzer normalizer, String field, String value) { @@ -1008,9 +1045,9 @@ public Map<String, NamedAnalyzer> indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion).dimension( - fieldType().isDimension() - ).init(this); + return new Builder(leafName(), indexAnalyzers, scriptCompiler, ignoreAboveDefault, indexCreatedVersion, indexSourceKeepMode) + .dimension(fieldType().isDimension()) + .init(this); } @Override @@ -1063,7 +1100,8 @@ protected void writeValue(Object value, XContentBuilder b) throws IOException { } }); } else if (hasDocValues) { - layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { + String offsetsFullPath = offsetsFieldMapper != null ?
offsetsFieldMapper.fullPath() : null; + layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath(), offsetsFullPath) { @Override protected BytesRef convert(BytesRef value) { @@ -1090,4 +1128,9 @@ protected void writeValue(Object value, XContentBuilder b) throws IOException { return new CompositeSyntheticFieldLoader(leafFieldName, fullFieldName, layers); } + + @Override + public boolean supportsStoringArraysNatively() { + return offsetsFieldMapper != null; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index bafa74b662f00..53a4886dc9e59 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -212,4 +212,8 @@ public static FieldType freezeAndDeduplicateFieldType(FieldType fieldType) { * Defines how this mapper counts towards {@link MapperService#INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING}. */ public abstract int getTotalFieldsCount(); + + public boolean supportsStoringArraysNatively() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoaderLayer.java b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoaderLayer.java index 68781830ffe8f..a348e310e796e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoaderLayer.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoaderLayer.java @@ -9,11 +9,13 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,14 +30,27 @@ public abstract class SortedSetDocValuesSyntheticFieldLoaderLayer implements Com private static final Logger logger = LogManager.getLogger(SortedSetDocValuesSyntheticFieldLoaderLayer.class); private final String name; + private final String offsetsFieldName; private DocValuesFieldValues docValues = NO_VALUES; /** * Build a loader from doc values and, optionally, a stored field. - * @param name the name of the field to load from doc values + * + * @param name the name of the field to load from doc values */ public SortedSetDocValuesSyntheticFieldLoaderLayer(String name) { + this(name, null); + } + + /** + * Build a loader from doc values and, optionally, a stored field. 
+ * + * @param name the name of the field to load from doc values + * @param offsetsFullPath the full path of the offsets field, used to synthesize arrays in their original order + */ + public SortedSetDocValuesSyntheticFieldLoaderLayer(String name, String offsetsFullPath) { this.name = name; + this.offsetsFieldName = offsetsFullPath; } @Override @@ -50,7 +65,7 @@ public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) th docValues = NO_VALUES; return null; } - if (docIdsInLeaf != null && docIdsInLeaf.length > 1) { + if (offsetsFieldName == null && docIdsInLeaf != null && docIdsInLeaf.length > 1) { /* * The singleton optimization is mostly about looking up ordinals * in sorted order and doesn't buy anything if there is only a single @@ -63,9 +78,16 @@ return loader; } } - ImmediateDocValuesLoader loader = new ImmediateDocValuesLoader(dv); - docValues = loader; - return loader; + BinaryDocValues oDv = offsetsFieldName != null ? DocValues.getBinary(reader, offsetsFieldName) : null; + if (oDv != null) { + OffsetDocValuesLoader loader = new OffsetDocValuesLoader(dv, oDv); + docValues = loader; + return loader; + } else { + ImmediateDocValuesLoader loader = new ImmediateDocValuesLoader(dv); + docValues = loader; + return loader; + } } @Override @@ -237,4 +259,86 @@ public void write(XContentBuilder b) throws IOException { * {@link BytesRef#deepCopyOf deep copy} if {@link #convert} didn't. */ protected abstract BytesRef preserve(BytesRef value); + + private class OffsetDocValuesLoader implements DocValuesLoader, DocValuesFieldValues { + private final BinaryDocValues oDv; + private final SortedSetDocValues dv; + private final ByteArrayStreamInput scratch = new ByteArrayStreamInput(); + + private boolean hasValue; + private int[] offsetToOrd; + + OffsetDocValuesLoader(SortedSetDocValues dv, BinaryDocValues oDv) { + this.dv = dv; + this.oDv = oDv; + } + + @Override + public boolean advanceToDoc(int docId) throws IOException { + hasValue = dv.advanceExact(docId); + if (hasValue) { + if (oDv.advanceExact(docId)) { + var encodedValue = oDv.binaryValue(); + scratch.reset(encodedValue.bytes, encodedValue.offset, encodedValue.length); + offsetToOrd = scratch.readVIntArray(); + } else { + offsetToOrd = null; + } + return true; + } else { + offsetToOrd = null; + return false; + } + } + + @Override + public int count() { + if (hasValue) { + if (offsetToOrd != null) { + // HACK: trick CompositeSyntheticFieldLoader into serializing this layer as an array.
+ // (if offsetToOrd is not null, then at index time an array was always specified even if there is just one value) + return offsetToOrd.length + 1; + } else { + return dv.docValueCount(); + } + } else { + return 0; + } + } + + @Override + public void write(XContentBuilder b) throws IOException { + if (hasValue == false) { + return; + } + if (offsetToOrd != null) { + long[] ords = new long[dv.docValueCount()]; + for (int i = 0; i < dv.docValueCount(); i++) { + ords[i] = dv.nextOrd(); + } + + // TODO: remove later + logger.info("ords=" + Arrays.toString(ords)); + logger.info("vals=" + Arrays.stream(ords).mapToObj(ord -> { + try { + return dv.lookupOrd(ord).utf8ToString(); + } catch (IOException e) { + throw new RuntimeException(e); + } + }).toList()); + logger.info("offsetToOrd=" + Arrays.toString(offsetToOrd)); + + for (int offset : offsetToOrd) { + long ord = ords[offset]; + BytesRef c = convert(dv.lookupOrd(ord)); + b.utf8Value(c.bytes, c.offset, c.length); + } + } else { + for (int i = 0; i < dv.docValueCount(); i++) { + BytesRef c = convert(dv.lookupOrd(dv.nextOrd())); + b.utf8Value(c.bytes, c.offset, c.length); + } + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index e3bdb3d45818f..790795e7cb31f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -244,7 +244,8 @@ public void testFetchSourceValue() throws IOException { createIndexAnalyzers(), ScriptCompiler.NONE, Integer.MAX_VALUE, - IndexVersion.current() + IndexVersion.current(), + Mapper.SourceKeepMode.NONE ).normalizer("lowercase").build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("value"), fetchSourceValue(normalizerMapper, "VALUE")); assertEquals(List.of("42"), fetchSourceValue(normalizerMapper, 42L)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java index fd024c5d23e28..4c5bfeb66b075 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java @@ -64,7 +64,8 @@ private KeywordFieldMapper.Builder getKeywordFieldMapperBuilder(boolean isStored IndexAnalyzers.of(Map.of(), Map.of("normalizer", Lucene.STANDARD_ANALYZER), Map.of()), ScriptCompiler.NONE, Integer.MAX_VALUE, - IndexVersion.current() + IndexVersion.current(), + Mapper.SourceKeepMode.NONE ); if (isStored) { keywordFieldMapperBuilder.stored(true); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 809660c5e9af8..d7a701cd7620a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1290,7 +1290,9 @@ public final void testSyntheticSourceMany() throws IOException { } SyntheticSourceExample example = support.example(maxValues); expected[i] = example.expected(); - iw.addDocument(mapper.parse(source(example::buildInput)).rootDoc()); + logger.info("expected[{}]:{}", i, expected[i]); + var sourceToParse = source(example::buildInput); + iw.addDocument(mapper.parse(sourceToParse).rootDoc()); } } try (DirectoryReader reader 
= DirectoryReader.open(directory)) { @@ -1722,6 +1724,7 @@ public void testSyntheticSourceKeepArrays() throws IOException { buildInput.accept(builder); builder.endObject(); String expected = Strings.toString(builder); + logger.info("expected:\n {}", expected); String actual = syntheticSource(mapperAll, buildInput); assertThat(actual, equalTo(expected)); }
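For completeness, here is a minimal sketch of the round trip through the `.offsets` side channel, assuming the same encoding that DocumentParser.processArrayOffsets writes and OffsetDocValuesLoader reads; the class and method names below are invented, and the decode side uses a plain list of sorted distinct values in place of the real SortedSetDocValues lookups.

```java
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.ByteArrayStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper, not part of the patch.
class OffsetsRoundTripSketch {
    // index side: serialize the position -> ordinal mapping as a VInt array,
    // the same payload that ends up in the BinaryDocValuesField
    static BytesRef encode(int[] offsetToOrd) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeVIntArray(offsetToOrd);
            return out.bytes().toBytesRef();
        }
    }

    // load side: replay the stored positions against the sorted distinct values,
    // restoring the original element order including duplicates
    static List<String> decode(BytesRef encoded, List<String> sortedDistinctValues) throws IOException {
        ByteArrayStreamInput in = new ByteArrayStreamInput();
        in.reset(encoded.bytes, encoded.offset, encoded.length);
        int[] offsetToOrd = in.readVIntArray();
        List<String> original = new ArrayList<>(offsetToOrd.length);
        for (int ord : offsetToOrd) {
            original.add(sortedDistinctValues.get(ord));
        }
        return original;
    }
}
```

With the earlier example, decode(encode(new int[] { 2, 0, 2, 1 }), List.of("a", "b", "c")) yields [c, a, c, b], which is how the loader reconstructs leaf keyword arrays in their original order.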