diff --git a/docs/changelog/137442.yaml b/docs/changelog/137442.yaml new file mode 100644 index 0000000000000..b999927dff74a --- /dev/null +++ b/docs/changelog/137442.yaml @@ -0,0 +1,5 @@ +pr: 137442 +summary: Handle ._original stored fields with fls +area: "Authorization" +type: bug +issues: [] diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 0c45ae66e744f..00e6cdf82afae 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -208,7 +208,7 @@ public MatchOnlyTextFieldType( super(name, true, false, false, tsi, meta); this.indexAnalyzer = Objects.requireNonNull(indexAnalyzer); this.textFieldType = new TextFieldType(name, isSyntheticSource, syntheticSourceDelegate); - this.originalName = isSyntheticSource ? name + "._original" : null; + this.originalName = isSyntheticSource ? name + KeywordFieldMapper.FALLBACK_FIELD_NAME_SUFFIX : null; this.withinMultiField = withinMultiField; this.storedFieldInBinaryFormat = storedFieldInBinaryFormat; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValues.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValues.java index 8544ddd0194f3..aa7b395519802 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValues.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValues.java @@ -26,6 +26,9 @@ * {@code _source}. */ public abstract class IgnoreMalformedStoredValues { + + public static final String IGNORE_MALFORMED_FIELD_NAME_SUFFIX = "._ignore_malformed"; + /** * Creates a stored field that stores malformed data to be used in synthetic source. * Name of the stored field is original name of the field with added conventional suffix. @@ -143,6 +146,6 @@ public void reset() { } public static String name(String fieldName) { - return fieldName + "._ignore_malformed"; + return fieldName + IGNORE_MALFORMED_FIELD_NAME_SUFFIX; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1bf2a57552b69..b7d5dd8465525 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -103,6 +103,7 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; private static final String HOST_NAME = "host.name"; + public static final String FALLBACK_FIELD_NAME_SUFFIX = "._original"; public static class Defaults { public static final FieldType FIELD_TYPE; @@ -537,7 +538,7 @@ public KeywordFieldType( this.isSyntheticSource = isSyntheticSource; this.indexSortConfig = builder.indexSortConfig; this.hasDocValuesSkipper = DocValuesSkipIndexType.NONE.equals(fieldType.docValuesSkipIndexType()) == false; - this.originalName = isSyntheticSource ? name + "._original" : null; + this.originalName = isSyntheticSource ? 
name + FALLBACK_FIELD_NAME_SUFFIX : null; } public KeywordFieldType(String name) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 908f58c5f9147..a99811541bc5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -40,7 +40,9 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.IgnoreMalformedStoredValues; import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.XContentBuilder; @@ -54,6 +56,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Function; /** * A {@link FilterLeafReader} that exposes only a subset @@ -68,36 +71,42 @@ public final class FieldSubsetReader extends SequentialStoredFieldsLeafReader { * Note that for convenience, the returned reader * can be used normally (e.g. passed to {@link DirectoryReader#openIfChanged(DirectoryReader)}) * and so on. - * @param in reader to filter - * @param filter fields to filter. + * + * @param in reader to filter + * @param filter fields to filter. + * @param isMapped whether a field is mapped or not. */ - public static DirectoryReader wrap(DirectoryReader in, CharacterRunAutomaton filter) throws IOException { - return new FieldSubsetDirectoryReader(in, filter); + public static DirectoryReader wrap(DirectoryReader in, CharacterRunAutomaton filter, Function isMapped) + throws IOException { + return new FieldSubsetDirectoryReader(in, filter, isMapped); } // wraps subreaders with fieldsubsetreaders. static class FieldSubsetDirectoryReader extends FilterDirectoryReader { private final CharacterRunAutomaton filter; + private final Function isMapped; - FieldSubsetDirectoryReader(DirectoryReader in, final CharacterRunAutomaton filter) throws IOException { + FieldSubsetDirectoryReader(DirectoryReader in, final CharacterRunAutomaton filter, Function isMapped) + throws IOException { super(in, new FilterDirectoryReader.SubReaderWrapper() { @Override public LeafReader wrap(LeafReader reader) { try { - return new FieldSubsetReader(reader, filter); + return new FieldSubsetReader(reader, filter, isMapped); } catch (IOException e) { throw new UncheckedIOException(e); } } }); this.filter = filter; + this.isMapped = isMapped; verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(in); } @Override protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { - return new FieldSubsetDirectoryReader(in, filter); + return new FieldSubsetDirectoryReader(in, filter, isMapped); } /** Return the automaton that is used to filter fields. */ @@ -133,11 +142,20 @@ public CacheHelper getReaderCacheHelper() { /** * Wrap a single segment, exposing a subset of its fields. 
*/ - FieldSubsetReader(LeafReader in, CharacterRunAutomaton filter) throws IOException { + FieldSubsetReader(LeafReader in, CharacterRunAutomaton filter, Function isMapped) throws IOException { super(in); ArrayList filteredInfos = new ArrayList<>(); for (FieldInfo fi : in.getFieldInfos()) { - if (filter.run(fi.name)) { + String name = fi.name; + if (fi.getName().endsWith(KeywordFieldMapper.FALLBACK_FIELD_NAME_SUFFIX) && isMapped.apply(fi.getName()) == false) { + name = fi.getName().substring(0, fi.getName().length() - KeywordFieldMapper.FALLBACK_FIELD_NAME_SUFFIX.length()); + } + if (fi.getName().endsWith(IgnoreMalformedStoredValues.IGNORE_MALFORMED_FIELD_NAME_SUFFIX) + && isMapped.apply(fi.getName()) == false) { + name = fi.getName() + .substring(0, fi.getName().length() - IgnoreMalformedStoredValues.IGNORE_MALFORMED_FIELD_NAME_SUFFIX.length()); + } + if (filter.run(name)) { filteredInfos.add(fi); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java index 71ba14b02667a..9eeb3edb9e136 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java @@ -96,7 +96,10 @@ public DirectoryReader apply(final DirectoryReader reader) { } } - return permissions.getFieldPermissions().filter(wrappedReader); + var searchContext = searchExecutionContextProvider.apply(shardId); + Function isMapped = searchContext::isFieldMapped; + + return permissions.getFieldPermissions().filter(wrappedReader, isMapped); } catch (IOException e) { logger.error("Unable to apply field level security"); throw ExceptionsHelper.convertToElastic(e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index c46f1350776b1..c11d4e78fbfeb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -32,6 +32,7 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -243,11 +244,11 @@ public boolean hasFieldLevelSecurity() { } /** Return a wrapped reader that only exposes allowed fields. 
*/ - public DirectoryReader filter(DirectoryReader reader) throws IOException { + public DirectoryReader filter(DirectoryReader reader, Function isMapped) throws IOException { if (hasFieldLevelSecurity() == false) { return reader; } - return FieldSubsetReader.wrap(reader, permittedFieldsAutomaton); + return FieldSubsetReader.wrap(reader, permittedFieldsAutomaton, isMapped); } Automaton getIncludeAutomaton() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 4ab4d3172fab0..eb650f57ea3fd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -110,7 +110,11 @@ public void testIndexed() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -141,7 +145,11 @@ public void testPoints() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -200,7 +208,11 @@ public void testKnnVectors() throws Exception { doc.add(new KnnFloatVectorField("fieldB", new float[] { 3.0f, 2.0f, 1.0f })); iw.addDocument(doc); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); LeafReader leafReader = ir.leaves().get(0).reader(); // Check that fieldA behaves as normal @@ -235,7 +247,11 @@ public void testKnnByteVectors() throws Exception { doc.add(new KnnByteVectorField("fieldB", new byte[] { 3, 2, 1 })); iw.addDocument(doc); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); LeafReader leafReader = ir.leaves().get(0).reader(); // Check that fieldA behaves as normal @@ -275,7 +291,11 @@ public void testStoredFieldsString() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -302,7 +322,11 @@ public void testStoredFieldsBinary() throws Exception { iw.addDocument(doc); // open reader - 
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -329,7 +353,11 @@ public void testStoredFieldsInt() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -356,7 +384,11 @@ public void testStoredFieldsLong() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -383,7 +415,11 @@ public void testStoredFieldsFloat() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -410,7 +446,11 @@ public void testStoredFieldsDouble() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field { @@ -439,7 +479,11 @@ public void testVectors() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field Fields vectors = ir.termVectors().get(0); @@ -468,7 +512,11 @@ public void testNorms() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -494,7 +542,11 @@ public void testNumericDocValues() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -523,7 +575,11 @@ public void testBinaryDocValues() throws Exception { iw.addDocument(doc); // open reader - 
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -552,7 +608,11 @@ public void testSortedDocValues() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -581,7 +641,11 @@ public void testSortedSetDocValues() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -611,7 +675,11 @@ public void testSortedNumericDocValues() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -641,7 +709,11 @@ public void testFieldInfos() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> true + ); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -672,7 +744,7 @@ public void testSourceFilteringIntegration() throws Exception { // open reader Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", SourceFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> true); // see only one field { @@ -714,7 +786,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, doc.docs().size() - 1); @@ -731,7 +804,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, 
doc.docs().size() - 1); @@ -745,7 +819,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, doc.docs().size() - 1); @@ -763,7 +838,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, doc.docs().size() - 1); @@ -777,7 +853,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, doc.docs().size() - 1); @@ -795,7 +872,8 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { try ( DirectoryReader indexReader = FieldSubsetReader.wrap( wrapInMockESDirectoryReader(DirectoryReader.open(directory)), - new CharacterRunAutomaton(automaton) + new CharacterRunAutomaton(automaton), + (fieldName) -> true ) ) { String syntheticSource = syntheticSource(mapper, indexReader, doc.docs().size() - 1); @@ -806,6 +884,54 @@ public void testIgnoredSourceFilteringIntegration() throws Exception { } } + public void testVisibilityOriginalFieldNames() throws Exception { + try (Directory dir = newDirectory()) { + try (IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null))) { + Document doc = new Document(); + doc.add(new StoredField("a._original", new BytesRef("a"))); + doc.add(new StoredField("b._ignore_malformed", new BytesRef("b"))); + doc.add(new StoredField("c", new BytesRef("c"))); + iw.addDocument(doc); + + // Field a is mapped: + + var filter = new CharacterRunAutomaton(Automatons.patterns(List.of("a", "c"))); + try (DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), filter, (fieldName) -> false)) { + + Document fields = ir.storedFields().document(0); + assertEquals(2, fields.getFields().size()); + assertEquals(new BytesRef("a"), fields.getBinaryValue("a._original")); + assertEquals(new BytesRef("c"), fields.getBinaryValue("c")); + } + // Field a is not mapped: + try (DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), filter, (fieldName) -> true)) { + + Document fields = ir.storedFields().document(0); + assertEquals(1, fields.getFields().size()); + assertNull(fields.getBinaryValue("a._original")); + assertEquals(new BytesRef("c"), fields.getBinaryValue("c")); + } + // Field b is mapped: + filter = new CharacterRunAutomaton(Automatons.patterns(List.of("b", "c"))); + try (DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), filter, (fieldName) -> false)) { + + Document fields = ir.storedFields().document(0); + assertEquals(2, fields.getFields().size()); + assertEquals(new BytesRef("b"), fields.getBinaryValue("b._ignore_malformed")); + assertEquals(new BytesRef("c"), fields.getBinaryValue("c")); + } + // Field b is not mapped: + try (DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), 
filter, (fieldName) -> true)) { + + Document fields = ir.storedFields().document(0); + assertEquals(1, fields.getFields().size()); + assertNull(fields.getBinaryValue("b._ignore_malformed")); + assertEquals(new BytesRef("c"), fields.getBinaryValue("c")); + } + } + } + } + public void testSourceFiltering() { // include on top-level value Map map = new HashMap<>(); @@ -968,7 +1094,7 @@ public void testFieldNames() throws Exception { Set fields = new HashSet<>(); fields.add("fieldA"); Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", FieldNamesFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> false); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -1026,7 +1152,7 @@ public void testFieldNamesThreeFields() throws Exception { // open reader Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", "fieldC", FieldNamesFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> false); // see only two fields LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -1071,7 +1197,7 @@ public void testFieldNamesMissing() throws Exception { // open reader Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", "fieldC", FieldNamesFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> false); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -1105,7 +1231,7 @@ public void testFieldNamesOldIndex() throws Exception { // open reader Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", SourceFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> false); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -1133,7 +1259,11 @@ public void testCoreCacheKey() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("id"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("id")), + (fieldName) -> false + ); assertEquals(2, ir.numDocs()); assertEquals(1, ir.leaves().size()); @@ -1167,7 +1297,11 @@ public void testFilterAwayAllVectors() throws Exception { iw.addDocument(doc); // open reader - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldB")), + (fieldName) -> false + ); // sees no fields assertNull(ir.termVectors().get(0)); @@ -1186,7 +1320,11 @@ public void testEmpty() throws Exception { iw.addDocument(new Document()); // open reader - DirectoryReader ir = 
FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); + DirectoryReader ir = FieldSubsetReader.wrap( + DirectoryReader.open(iw), + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> false + ); // see no fields LeafReader segmentReader = ir.leaves().get(0).reader(); @@ -1217,11 +1355,12 @@ public void testWrapTwice() throws Exception { final DirectoryReader directoryReader = FieldSubsetReader.wrap( DirectoryReader.open(dir), - new CharacterRunAutomaton(Automata.makeString("fieldA")) + new CharacterRunAutomaton(Automata.makeString("fieldA")), + (fieldName) -> false ); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> FieldSubsetReader.wrap(directoryReader, new CharacterRunAutomaton(Automata.makeString("fieldA"))) + () -> FieldSubsetReader.wrap(directoryReader, new CharacterRunAutomaton(Automata.makeString("fieldA")), (fieldName) -> false) ); assertThat( e.getMessage(), @@ -1407,7 +1546,7 @@ public void testProducesStoredFieldsReader() throws Exception { // open reader Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", SourceFieldMapper.NAME)); - DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); + DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton), (fieldName) -> false); TestUtil.checkReader(ir); assertThat(ir.leaves().size(), greaterThanOrEqualTo(1)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java index 104f6f2847ab0..60cb967fd35cf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperUnitTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesModule; @@ -86,7 +87,14 @@ public void tearDown() throws Exception { } public void testDefaultMetaFields() throws Exception { - securityIndexReaderWrapper = new SecurityIndexReaderWrapper(null, null, securityContext, licenseState, scriptService) { + var searchExecutionContext = mock(SearchExecutionContext.class); + securityIndexReaderWrapper = new SecurityIndexReaderWrapper( + id -> searchExecutionContext, + null, + securityContext, + licenseState, + scriptService + ) { @Override protected IndicesAccessControl getIndicesAccessControl() { IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldDataCacheWithFieldSubsetReaderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldDataCacheWithFieldSubsetReaderTests.java index 4d46cba676049..c80cf7608234c 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldDataCacheWithFieldSubsetReaderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldDataCacheWithFieldSubsetReaderTests.java @@ -116,7 +116,7 @@ public void testSortedSetDVOrdinalsIndexFieldData_global() throws Exception { assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs)); assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1)); - DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty())); + DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()), (fieldName) -> true); global = sortedSetOrdinalsIndexFieldData.loadGlobal(ir); atomic = global.load(ir.leaves().get(0)); assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L)); @@ -129,7 +129,7 @@ public void testSortedSetDVOrdinalsIndexFieldData_segment() throws Exception { assertThat(atomic.getOrdinalsValues().getValueCount(), greaterThanOrEqualTo(1L)); } - DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty())); + DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()), (fieldName) -> true); for (LeafReaderContext context : ir.leaves()) { LeafOrdinalsFieldData atomic = sortedSetOrdinalsIndexFieldData.load(context); assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L)); @@ -145,7 +145,7 @@ public void testPagedBytesIndexFieldData_global() throws Exception { assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs)); assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1)); - DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty())); + DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()), (fieldName) -> true); global = pagedBytesIndexFieldData.loadGlobal(ir); atomic = global.load(ir.leaves().get(0)); assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L)); @@ -160,7 +160,7 @@ public void testPagedBytesIndexFieldData_segment() throws Exception { } assertThat(indexFieldDataCache.leafLevelBuilds, equalTo(ir.leaves().size())); - DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty())); + DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()), (fieldName) -> true); for (LeafReaderContext context : ir.leaves()) { LeafOrdinalsFieldData atomic = pagedBytesIndexFieldData.load(context); assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/30_field_level_security_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/30_field_level_security_synthetic_source.yml index 301cb01acd2d3..c038c33f68f5a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/30_field_level_security_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/30_field_level_security_synthetic_source.yml @@ -24,7 +24,134 @@ Filter single field: name: type: keyword secret: + type: match_only_text + + - do: + bulk: + index: index_fls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "secret":"squirrel"}' + - match: { errors: false } 
+ + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_fls" ] + privileges: [ "read" ] + field_security: + grant: [ "name" ] + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... + - do: + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: squirrel } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - is_false: "hits.hits.0._source.secret" + +--- +match_only_text field type grant all except secret field: + - do: + indices.create: + index: index_fls + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: type: keyword + secret: + type: match_only_text + + - do: + bulk: + index: index_fls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "secret":"squirrel"}' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_fls" ] + privileges: [ "read" ] + field_security: + grant: [ "*" ] + except: [ "secret" ] + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... + - do: + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: squirrel } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - is_false: "hits.hits.0._source.secret" + +--- +match_only_text field type grant name field: + - do: + indices.create: + index: index_fls + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: + type: match_only_text + secret: + type: match_only_text - do: bulk: @@ -73,6 +200,136 @@ Filter single field: - match: { hits.hits.0._source.name: A } - is_false: "hits.hits.0._source.secret" +--- +keyword field type with ignore_above: + - do: + indices.create: + index: index_fls + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: + type: keyword + secret: + type: keyword + ignore_above: 3 + + - do: + bulk: + index: index_fls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "secret":"squirrel"}' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_fls" ] + privileges: [ "read" ] + field_security: + grant: [ "*" ] + except: [ "secret" ] + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... 
+ - do: + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: squirrel } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - is_false: "hits.hits.0._source.secret" + +--- +long field type with ignore_malformed: + - do: + indices.create: + index: index_fls + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + name: + type: keyword + secret: + type: long + ignore_malformed: true + + - do: + bulk: + index: index_fls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "secret":"squirrel"}' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_fls" ] + privileges: [ "read" ] + field_security: + grant: [ "*" ] + except: [ "secret" ] + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... + - do: + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: squirrel } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_fls + - match: { hits.total.value: 1 } + - match: { hits.total.relation: "eq" } + - match: { hits.hits.0._source.name: A } + - is_false: "hits.hits.0._source.secret" + --- Filter fields in object: - do: diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 1281fa5c1fcfd..b7dab6ef72b54 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -1027,7 +1027,7 @@ private WildcardFieldMapper( this.indexVersionCreated = builder.indexCreatedVersion; this.ignoreAboveDefault = builder.ignoreAboveDefault; this.ignoreAbove = new IgnoreAbove(builder.ignoreAbove.getValue(), builder.indexMode, builder.indexCreatedVersion); - this.originalName = storeIgnored ? fullPath() + "._original" : null; + this.originalName = storeIgnored ? fullPath() + KeywordFieldMapper.FALLBACK_FIELD_NAME_SUFFIX : null; } @Override