Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

import org.apache.lucene.index.FieldInfo;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;

import java.util.HashSet;
import java.util.Set;
Expand Down Expand Up @@ -50,6 +51,10 @@ public Status needsField(FieldInfo fieldInfo) {
if (fields.contains(fieldInfo.name)) {
return Status.YES;
}

if (fieldInfo.name.startsWith(IgnoredSourceFieldMapper.NAME)) {
return Status.YES;
}
return Status.NO;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.index.fieldvisitor;

import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.StoredFieldVisitor;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
import org.elasticsearch.search.fetch.StoredFieldsSpec;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * A {@link StoredFieldLoader} specialized for loading {@code _ignored_source} entries for a
 * single requested field. Instead of loading the whole ignored-source blob it visits only the
 * per-field stored fields ({@code _ignored_source.<path>}) that can contain values for the
 * requested field or one of its parent objects. Use {@link #supports(StoredFieldsSpec)} to
 * check applicability before construction.
 */
class IgnoredSourceFieldLoader extends StoredFieldLoader {

    /**
     * Stored-field names that may hold ignored-source entries for the requested field: the
     * field's full dotted path and every parent path, each prefixed with
     * {@code _ignored_source.}. Immutable.
     */
    final Set<String> potentialFieldsToLoad;

    /**
     * @param spec the stored-fields spec; callers are expected to have verified it with
     *             {@link #supports(StoredFieldsSpec)} first
     */
    IgnoredSourceFieldLoader(StoredFieldsSpec spec) {
        Set<String> fields = new HashSet<>();
        for (String requiredStoredField : spec.requiredStoredFields()) {
            if (requiredStoredField.startsWith(IgnoredSourceFieldMapper.NAME)) {
                // Required names have the shape "_ignored_source.some.dotted.path". Strip the
                // prefix AND the separating dot before splitting: keeping the leading dot makes
                // the first split part an empty string, which produced a spurious
                // "_ignored_source." entry that can never match a real stored field.
                String fieldName = requiredStoredField.substring(IgnoredSourceFieldMapper.NAME.length());
                if (fieldName.startsWith(".")) {
                    fieldName = fieldName.substring(1);
                }
                if (fieldName.isEmpty() == false) {
                    fields.addAll(splitIntoFieldPaths(fieldName));
                }
            }
        }
        this.potentialFieldsToLoad = Set.copyOf(fields);
    }

    /**
     * Expands a dotted field path into the prefixed stored-field names of the path and all of
     * its parents, e.g. {@code "a.b.c"} yields {@code _ignored_source.a},
     * {@code _ignored_source.a.b} and {@code _ignored_source.a.b.c}. Parent paths are included
     * because ignored source may be captured at an ancestor object of the requested field.
     */
    static Set<String> splitIntoFieldPaths(String fieldName) {
        var paths = new HashSet<String>();
        var current = new StringBuilder();
        for (var part : fieldName.split("\\.")) {
            if (current.isEmpty() == false) {
                current.append('.');
            }
            current.append(part);
            paths.add(IgnoredSourceFieldMapper.NAME + "." + current);
        }
        return paths;
    }

    @Override
    public LeafStoredFieldLoader getLoader(LeafReaderContext ctx, int[] docs) throws IOException {
        var reader = sequentialReader(ctx);
        var visitor = new SFV(potentialFieldsToLoad);
        return new LeafStoredFieldLoader() {

            // Last document loaded; avoids re-visiting stored fields on repeated advanceTo calls.
            private int doc = -1;

            @Override
            public void advanceTo(int doc) throws IOException {
                if (doc != this.doc) {
                    visitor.reset();
                    reader.accept(doc, visitor);
                    this.doc = doc;
                }
            }

            @Override
            public BytesReference source() {
                // This loader never loads _source (see supports()).
                return null;
            }

            @Override
            public String id() {
                // Metadata fields are not loaded by this loader (see supports()).
                return null;
            }

            @Override
            public String routing() {
                return null;
            }

            @Override
            public Map<String, List<Object>> storedFields() {
                // All matched per-field entries are surfaced under the generic
                // _ignored_source key, as consumers expect.
                // NOTE(review): the list is the visitor's internal buffer and is cleared by the
                // next advanceTo - callers must consume it before advancing.
                return Map.of(IgnoredSourceFieldMapper.NAME, visitor.values);
            }
        };
    }

    @Override
    public List<String> fieldsToLoad() {
        return List.copyOf(potentialFieldsToLoad);
    }

    /**
     * Stored field visitor that collects and decodes only the ignored-source entries whose
     * stored-field name is in {@code potentialFieldsToLoad}.
     */
    static class SFV extends StoredFieldVisitor {

        // Set once the first matching field has been seen, enabling early termination.
        boolean found;
        // Decoded IgnoredSourceFieldMapper.NameValue instances for the current document.
        final List<Object> values = new ArrayList<>();
        final Set<String> potentialFieldsToLoad;

        SFV(Set<String> potentialFieldsToLoad) {
            this.potentialFieldsToLoad = potentialFieldsToLoad;
        }

        @Override
        public Status needsField(FieldInfo fieldInfo) throws IOException {
            if (potentialFieldsToLoad.contains(fieldInfo.name)) {
                found = true;
                return Status.YES;
            } else {
                if (found) {
                    // NOTE(review): stopping after the first non-matching field assumes all
                    // matching _ignored_source.* fields are stored contiguously. They are
                    // appended together in IgnoredSourceFieldMapper#postParse, but confirm no
                    // other writer interleaves stored fields between them.
                    return Status.STOP;
                } else {
                    return Status.NO;
                }
            }
        }

        @Override
        public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
            var result = IgnoredSourceFieldMapper.decode(value);
            values.add(result);
        }

        /** Clears per-document state so the visitor can be reused for the next document. */
        void reset() {
            values.clear();
            found = false;
        }

    }

    /**
     * Returns true when the spec asks for exactly one stored field, that field is an
     * ignored-source field, and neither _source nor metadata fields are required - the only
     * situation this specialized loader handles.
     */
    static boolean supports(StoredFieldsSpec spec) {
        return spec.requiresSource() == false
            && spec.requiresMetadata() == false
            && spec.requiredStoredFields().size() == 1
            && spec.requiredStoredFields().iterator().next().startsWith(IgnoredSourceFieldMapper.NAME);
    }

    // TODO: use provided one
    // Prefers the merge-instance (sequential) stored fields reader when available, which is
    // faster for docs visited in increasing order; falls back to the random-access reader.
    private static CheckedBiConsumer<Integer, StoredFieldVisitor, IOException> sequentialReader(LeafReaderContext ctx) throws IOException {
        LeafReader leafReader = ctx.reader();
        if (leafReader instanceof SequentialStoredFieldsLeafReader lf) {
            return lf.getSequentialStoredFieldsReader()::document;
        }
        return leafReader.storedFields()::document;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,9 @@ public static StoredFieldLoader fromSpec(StoredFieldsSpec spec) {
if (spec.noRequirements()) {
return StoredFieldLoader.empty();
}
if (IgnoredSourceFieldLoader.supports(spec)) {
return new IgnoredSourceFieldLoader(spec);
}
return create(spec.requiresSource(), spec.requiredStoredFields());
}

Expand Down Expand Up @@ -91,6 +94,10 @@ public static StoredFieldLoader fromSpecSequential(StoredFieldsSpec spec) {
if (spec.noRequirements()) {
return StoredFieldLoader.empty();
}
if (IgnoredSourceFieldLoader.supports(spec)) {
return new IgnoredSourceFieldLoader(spec);
}

List<String> fieldsToLoad = fieldsToLoad(spec.requiresSource(), spec.requiredStoredFields());
return new StoredFieldLoader() {
@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params

for (DocumentField field : metaFields.values()) {
// TODO: can we avoid having an exception here?
if (field.getName().equals(IgnoredFieldMapper.NAME) || field.getName().equals(IgnoredSourceFieldMapper.NAME)) {
if (field.getName().equals(IgnoredFieldMapper.NAME) || field.getName().startsWith(IgnoredSourceFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
} else {
builder.field(field.getName(), field.<Object>getValue());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,12 @@
public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader {
private final Reader<?> reader;
private final String fieldName;
private final Set<String> fieldPaths;

protected FallbackSyntheticSourceBlockLoader(Reader<?> reader, String fieldName) {
this.reader = reader;
this.fieldName = fieldName;
this.fieldPaths = splitIntoFieldPaths(fieldName);
}

@Override
Expand All @@ -52,12 +54,12 @@ public ColumnAtATimeReader columnAtATimeReader(LeafReaderContext context) throws

@Override
public RowStrideReader rowStrideReader(LeafReaderContext context) throws IOException {
return new IgnoredSourceRowStrideReader<>(fieldName, reader);
return new IgnoredSourceRowStrideReader<>(fieldName, reader, fieldPaths);
}

@Override
public StoredFieldsSpec rowStrideStoredFieldSpec() {
return new StoredFieldsSpec(false, false, Set.of(IgnoredSourceFieldMapper.NAME));
return new StoredFieldsSpec(false, false, Set.of(IgnoredSourceFieldMapper.NAME + "." + fieldName));
}

@Override
Expand All @@ -70,7 +72,31 @@ public SortedSetDocValues ordinals(LeafReaderContext context) throws IOException
throw new UnsupportedOperationException();
}

private record IgnoredSourceRowStrideReader<T>(String fieldName, Reader<T> reader) implements RowStrideReader {
/**
 * Returns the root object name {@code "_doc"}, the given dotted field path, and every
 * ancestor path of it; e.g. {@code "a.b.c"} yields {@code _doc, a, a.b, a.b.c}.
 */
static Set<String> splitIntoFieldPaths(String fieldName) {
    String[] parts = fieldName.split("\\.");
    Set<String> result = new HashSet<>();
    result.add("_doc");
    StringBuilder prefix = new StringBuilder();
    for (int i = 0; i < parts.length; i++) {
        if (prefix.length() > 0) {
            prefix.append('.');
        }
        prefix.append(parts[i]);
        result.add(prefix.toString());
    }
    return result;
}

private static final class IgnoredSourceRowStrideReader<T> implements RowStrideReader {
private final String fieldName;
private final Reader<T> reader;
private final Set<String> fieldPaths;

// Creates a reader for a single field: fieldName is the dotted path being loaded, reader
// converts decoded ignored-source values into block values, and fieldPaths holds the field
// plus all of its parent paths (precomputed once in the enclosing loader's constructor so
// the path split is not repeated for every row).
private IgnoredSourceRowStrideReader(String fieldName, Reader<T> reader, Set<String> fieldPaths) {
this.fieldName = fieldName;
this.reader = reader;
this.fieldPaths = fieldPaths;
}

@Override
public void read(int docId, StoredFields storedFields, Builder builder) throws IOException {
var ignoredSource = storedFields.storedFields().get(IgnoredSourceFieldMapper.NAME);
Expand All @@ -80,26 +106,9 @@ public void read(int docId, StoredFields storedFields, Builder builder) throws I
}

Map<String, List<IgnoredSourceFieldMapper.NameValue>> valuesForFieldAndParents = new HashMap<>();

// Contains name of the field and all its parents
Set<String> fieldNames = new HashSet<>() {
{
add("_doc");
}
};

var current = new StringBuilder();
for (String part : fieldName.split("\\.")) {
if (current.isEmpty() == false) {
current.append('.');
}
current.append(part);
fieldNames.add(current.toString());
}

for (Object value : ignoredSource) {
IgnoredSourceFieldMapper.NameValue nameValue = IgnoredSourceFieldMapper.decode(value);
if (fieldNames.contains(nameValue.name())) {
IgnoredSourceFieldMapper.NameValue nameValue = (IgnoredSourceFieldMapper.NameValue) value;
if (fieldPaths.contains(nameValue.name())) {
valuesForFieldAndParents.computeIfAbsent(nameValue.name(), k -> new ArrayList<>()).add(nameValue);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,8 @@ public void postParse(DocumentParserContext context) {
}

for (NameValue nameValue : context.getIgnoredFieldValues()) {
nameValue.doc().add(new StoredField(NAME, encode(nameValue)));
String fieldName = NAME + "." + nameValue.name;
nameValue.doc().add(new StoredField(fieldName, encode(nameValue)));
}
}

Expand All @@ -176,8 +177,12 @@ static byte[] encode(NameValue values) {
return bytes;
}

static NameValue decode(Object field) {
public static NameValue decode(Object field) {
byte[] bytes = ((BytesRef) field).bytes;
return decode(bytes);
}

public static NameValue decode(byte[] bytes) {
int encodedSize = ByteUtils.readIntLE(bytes, 0);
int nameSize = encodedSize % PARENT_OFFSET_IN_NAME_OFFSET;
int parentOffset = encodedSize / PARENT_OFFSET_IN_NAME_OFFSET;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ public void write(LeafStoredFieldLoader storedFieldLoader, int docId, XContentBu
if (loader != null) {
loader.load(e.getValue());
}
if (IgnoredSourceFieldMapper.NAME.equals(e.getKey())) {
if (e.getKey().startsWith(IgnoredSourceFieldMapper.NAME)) {
for (Object value : e.getValue()) {
if (objectsWithIgnoredFields == null) {
objectsWithIgnoredFields = new HashMap<>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -878,7 +878,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t
}
// _ignored is the only multi-valued meta field
// TODO: can we avoid having an exception here?
if (IgnoredFieldMapper.NAME.equals(field.getName()) || IgnoredSourceFieldMapper.NAME.equals(field.getName())) {
if (IgnoredFieldMapper.NAME.equals(field.getName()) || field.getName().startsWith(IgnoredSourceFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
} else {
builder.field(field.getName(), field.<Object>getValue());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.lookup.LeafFieldLookupProvider;

Expand All @@ -21,6 +22,7 @@
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;

/**
* Makes pre-loaded stored fields available via a LeafSearchLookup.
Expand Down Expand Up @@ -61,7 +63,13 @@ void setPreloadedStoredFieldNames(Set<String> preloadedStoredFieldNames) {
}

void setPreloadedStoredFieldValues(String id, Map<String, List<Object>> preloadedStoredFieldValues) {
assert preloadedStoredFieldNames.get().containsAll(preloadedStoredFieldValues.keySet())
assert preloadedStoredFieldNames.get()
.containsAll(
preloadedStoredFieldValues.keySet()
.stream()
.filter(s -> s.startsWith(IgnoredSourceFieldMapper.NAME) == false)
.collect(Collectors.toSet())
)
: "Provided stored field that was not expected to be preloaded? "
+ preloadedStoredFieldValues.keySet()
+ " - "
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) {
}
final MappedFieldType fieldType = searchExecutionContext.getFieldType(matchingFieldName);
// NOTE: Exclude _ignored_source when requested via wildcard '*'
if (matchingFieldName.equals(IgnoredSourceFieldMapper.NAME) && Regex.isSimpleMatchPattern(storedField)) {
if (matchingFieldName.startsWith(IgnoredSourceFieldMapper.NAME) && Regex.isSimpleMatchPattern(storedField)) {
continue;
}
// NOTE: checking if the field is stored is required for backward compatibility reasons and to make
Expand Down
Loading