6 changes: 6 additions & 0 deletions docs/changelog/121324.yaml
@@ -0,0 +1,6 @@
pr: 121324
summary: Support duplicate suggestions in completion field
area: Suggesters
type: bug
issues:
- 82432
@@ -395,3 +395,75 @@ setup:
field: suggest_multi_contexts
contexts:
location: []

---
"Duplicate suggestions in different contexts":
- requires:
cluster_features: [ "search.completion_field.duplicate.support" ]
reason: "Support for duplicate suggestions in different contexts"

- do:
index:
refresh: true
index: test
id: "1"
body:
suggest_context:
-
input: "foox"
weight: 2
contexts:
color: ["red", "yellow"]
-
input: "foox"
weight: 3
contexts:
color: ["blue", "green", "yellow"]
- do:
search:
body:
suggest:
result:
text: "foo"
completion:
field: suggest_context
contexts:
color: "red"

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }
- match: { suggest.result.0.options.0.text: "foox" }
- match: { suggest.result.0.options.0._score: 2 }

- do:
search:
body:
suggest:
result:
text: "foo"
completion:
field: suggest_context
contexts:
color: "yellow"

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }
- match: { suggest.result.0.options.0.text: "foox" }
# both entries carry the "yellow" context; the highest weight wins
- match: { suggest.result.0.options.0._score: 3 }

- do:
search:
body:
suggest:
result:
text: "foo"
completion:
field: suggest_context
contexts:
color: "blue"

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }
- match: { suggest.result.0.options.0.text: "foox" }
- match: { suggest.result.0.options.0._score: 3 }
@@ -268,3 +268,80 @@

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }

---
"Duplicate suggestions in different contexts in sub-fields":
- requires:
cluster_features: [ "search.completion_field.duplicate.support" ]
reason: "Support for duplicate suggestions in different contexts"

- do:
indices.create:
index: completion_with_context
body:
mappings:
"properties":
"suggest_1":
"type": "completion"
"contexts":
-
"name": "color"
"type": "category"
"fields":
"suggest_2":
"type": "completion"
"contexts":
-
"name": "color"
"type": "category"


- do:
index:
refresh: true
index: completion_with_context
id: "1"
body:
suggest_1:
-
input: "foox"
weight: 2
contexts:
color: ["red"]
-
input: "foox"
weight: 3
contexts:
color: ["blue", "green"]
- do:
search:
body:
suggest:
result:
text: "foo"
completion:
field: suggest_1.suggest_2
contexts:
color: "red"

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }
- match: { suggest.result.0.options.0.text: "foox" }
- match: { suggest.result.0.options.0._score: 2 }


- do:
search:
body:
suggest:
result:
text: "foo"
completion:
field: suggest_1.suggest_2
contexts:
color: "blue"

- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 1 }
- match: { suggest.result.0.options.0.text: "foox" }
- match: { suggest.result.0.options.0._score: 3 }
@@ -392,7 +392,7 @@ public void parse(DocumentParserContext context) throws IOException {
// parse
XContentParser parser = context.parser();
Token token = parser.currentToken();
Map<String, CompletionInputMetadata> inputMap = Maps.newMapWithExpectedSize(1);
Map<String, CompletionInputMetadataContainer> inputMap = Maps.newMapWithExpectedSize(1);

if (token == Token.VALUE_NULL) { // ignore null values
return;
@@ -405,7 +405,7 @@ public void parse(DocumentParserContext context) throws IOException {
}

// index
for (Map.Entry<String, CompletionInputMetadata> completionInput : inputMap.entrySet()) {
for (Map.Entry<String, CompletionInputMetadataContainer> completionInput : inputMap.entrySet()) {
String input = completionInput.getKey();
if (input.trim().isEmpty()) {
context.addIgnoredField(mappedFieldType.name());
@@ -420,21 +420,33 @@ public void parse(DocumentParserContext context) throws IOException {
}
input = input.substring(0, len);
}
CompletionInputMetadata metadata = completionInput.getValue();
CompletionInputMetadataContainer cmc = completionInput.getValue();
if (fieldType().hasContextMappings()) {
fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts);
for (CompletionInputMetadata metadata : cmc.getValues()) {
fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts);
}
} else {
context.doc().add(new SuggestField(fieldType().name(), input, metadata.weight));
context.doc().add(new SuggestField(fieldType().name(), input, cmc.getWeight()));
}
}

context.addToFieldNames(fieldType().name());
for (CompletionInputMetadata metadata : inputMap.values()) {
multiFields().parse(
this,
context,
() -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
);
for (CompletionInputMetadataContainer cmc : inputMap.values()) {
if (fieldType().hasContextMappings()) {
for (CompletionInputMetadata metadata : cmc.getValues()) {
multiFields().parse(
this,
context,
() -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
);
}
} else {
CompletionInputMetadata metadata = cmc.getValue();
multiFields().parse(
this,
context,
() -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
);
}
}
}

@@ -447,11 +459,13 @@ private void parse(
DocumentParserContext documentParserContext,
Token token,
XContentParser parser,
Map<String, CompletionInputMetadata> inputMap
Map<String, CompletionInputMetadataContainer> inputMap
) throws IOException {
String currentFieldName = null;
if (token == Token.VALUE_STRING) {
inputMap.put(parser.text(), new CompletionInputMetadata(parser.text(), Collections.<String, Set<String>>emptyMap(), 1));
CompletionInputMetadataContainer cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings());
cmc.add(new CompletionInputMetadata(parser.text(), Collections.emptyMap(), 1));
inputMap.put(parser.text(), cmc);
} else if (token == Token.START_OBJECT) {
Set<String> inputs = new HashSet<>();
int weight = 1;
@@ -531,8 +545,14 @@ private void parse(
}
}
for (String input : inputs) {
if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) {
inputMap.put(input, new CompletionInputMetadata(input, contextsMap, weight));
CompletionInputMetadata cm = new CompletionInputMetadata(input, contextsMap, weight);
CompletionInputMetadataContainer cmc = inputMap.get(input);
if (cmc != null) {
cmc.add(cm);
} else {
cmc = new CompletionInputMetadataContainer(fieldType().hasContextMappings());
cmc.add(cm);
inputMap.put(input, cmc);
}
}
} else {
@@ -543,10 +563,46 @@ private void parse(
}
}

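/**
 * Collects the {@link CompletionInputMetadata} entries parsed for a single input value.
 * With context mappings, all duplicate entries are kept so that each set of contexts is indexed;
 * without contexts, only the highest-weight entry is retained, matching the previous behaviour.
 */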
static class CompletionInputMetadataContainer {
private final boolean hasContexts;
private final List<CompletionInputMetadata> list;
private CompletionInputMetadata single;

CompletionInputMetadataContainer(boolean hasContexts) {
this.hasContexts = hasContexts;
this.list = hasContexts ? new ArrayList<>() : null;
}

void add(CompletionInputMetadata cm) {
if (hasContexts) {
list.add(cm);
} else {
if (single == null || single.weight < cm.weight) {
single = cm;
}
}
}

List<CompletionInputMetadata> getValues() {
assert hasContexts;
return list;
}

CompletionInputMetadata getValue() {
assert hasContexts == false;
return single;
}

int getWeight() {
assert hasContexts == false;
return single.weight;
}
}

static class CompletionInputMetadata {
public final String input;
public final Map<String, Set<String>> contexts;
public final int weight;
private final String input;
private final Map<String, Set<String>> contexts;
private final int weight;

CompletionInputMetadata(String input, Map<String, Set<String>> contexts, int weight) {
this.input = input;
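To make the new behaviour concrete, here is a minimal sketch of how the container above resolves duplicate inputs. It is illustrative only: it assumes same-package access to the package-private CompletionInputMetadataContainer and CompletionInputMetadata classes (as the mapper's unit tests have) and that java.util.Map and java.util.Set are imported; the method name is hypothetical and not part of this change.

static void duplicateHandlingSketch() {
    // With context mappings every duplicate entry is kept, one per set of contexts,
    // so each entry is later indexed with its own contexts and weight.
    CompletionInputMetadataContainer withContexts = new CompletionInputMetadataContainer(true);
    withContexts.add(new CompletionInputMetadata("foox", Map.of("color", Set.of("red")), 2));
    withContexts.add(new CompletionInputMetadata("foox", Map.of("color", Set.of("blue", "green")), 3));
    assert withContexts.getValues().size() == 2;

    // Without context mappings only the highest-weight duplicate is retained,
    // matching the pre-existing behaviour for completion fields without contexts.
    CompletionInputMetadataContainer noContexts = new CompletionInputMetadataContainer(false);
    noContexts.add(new CompletionInputMetadata("foox", Map.of(), 2));
    noContexts.add(new CompletionInputMetadata("foox", Map.of(), 3));
    assert noContexts.getWeight() == 3;
}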
@@ -25,9 +25,12 @@ public Set<NodeFeature> getFeatures() {
}

public static final NodeFeature RETRIEVER_RESCORER_ENABLED = new NodeFeature("search.retriever.rescorer.enabled");
public static final NodeFeature COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS = new NodeFeature(
"search.completion_field.duplicate.support"
);

@Override
public Set<NodeFeature> getTestFeatures() {
return Set.of(RETRIEVER_RESCORER_ENABLED);
return Set.of(RETRIEVER_RESCORER_ENABLED, COMPLETION_FIELD_SUPPORTS_DUPLICATE_SUGGESTIONS);
}
}
@@ -303,6 +303,55 @@ public void testKeywordWithSubCompletionAndContext() throws Exception {
);
}

public void testDuplicateSuggestionsWithContexts() throws IOException {
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
b.field("type", "completion");
b.startArray("contexts");
{
b.startObject();
b.field("name", "place");
b.field("type", "category");
b.endObject();
}
b.endArray();
}));

ParsedDocument parsedDocument = defaultMapper.parse(source(b -> {
b.startArray("field");
{
b.startObject();
{
b.array("input", "timmy", "starbucks");
b.startObject("contexts").array("place", "cafe", "food").endObject();
b.field("weight", 10);
}
b.endObject();
b.startObject();
{
b.array("input", "timmy", "starbucks");
b.startObject("contexts").array("place", "restaurant").endObject();
b.field("weight", 1);
}
b.endObject();
}
b.endArray();
}));

List<IndexableField> indexedFields = parsedDocument.rootDoc().getFields("field");
assertThat(indexedFields, hasSize(4));
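        // both duplicate entries are retained: 2 inputs x 2 entries = 4 context suggest fields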

assertThat(
indexedFields,
containsInAnyOrder(
contextSuggestField("timmy"),
contextSuggestField("timmy"),
contextSuggestField("starbucks"),
contextSuggestField("starbucks")
)
);

}

public void testCompletionWithContextAndSubCompletion() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
b.field("type", "completion");