Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix flaky test FieldDataLoadingIT.testIndicesFieldDataCacheSizeSetting ([#19571](https://github.com/opensearch-project/OpenSearch/pull/19571))
- Avoid primary shard failure caused by merged segment warmer exceptions ([#19436](https://github.com/opensearch-project/OpenSearch/pull/19436))
- Fix pull-based ingestion out-of-bounds offset scenarios and remove persisted offsets ([#19607](https://github.com/opensearch-project/OpenSearch/pull/19607))
- Fix NPE of ScriptScoreQuery ([#19650](https://github.com/opensearch-project/OpenSearch/pull/19650))

### Dependencies
- Update to Gradle 9.1 ([#19575](https://github.com/opensearch-project/OpenSearch/pull/19575))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import java.util.function.Function;

import static org.opensearch.index.query.QueryBuilders.boolQuery;
import static org.opensearch.index.query.QueryBuilders.idsQuery;
import static org.opensearch.index.query.QueryBuilders.matchQuery;
import static org.opensearch.index.query.QueryBuilders.scriptScoreQuery;
import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
Expand Down Expand Up @@ -217,4 +218,24 @@ public void testDisallowExpensiveQueries() throws Exception {
assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet());
}
}

// Regression test for https://github.com/opensearch-project/OpenSearch/issues/18446
public void testScriptScoreNotExistingQuery() throws Exception {
    assertAcked(prepareCreate("test-index").setMapping("field2", "type=double"));
    client().prepareIndex("test-index").setId("1").setSource("field2", 1).get();
    refresh();
    indexRandomForConcurrentSearch("test-index");

    final Map<String, Object> scriptParams = new HashMap<>();
    scriptParams.put("param1", 0.1);
    final Script scoreScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", scriptParams);
    // Only doc id "1" exists, so this ids query matches nothing on the shard.
    final QueryBuilder nonMatchingIds = idsQuery().addIds("2");
    final QueryBuilder query = scriptScoreQuery(nonMatchingIds, scoreScript);
    // Issue 18446 is triggered by a null Scorer coming out of the ScorerSupplier.
    // Lucene normally drives search through bulkScorer(), which does not hit the NPE,
    // so profiling is enabled here to force the scorer() path and exercise the
    // intended code path.
    final SearchResponse response = client().prepareSearch("test-index").setQuery(query).setProfile(true).get();
    assertNoFailures(response);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -124,72 +124,45 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryScoreMode, 1.0f);

return new Weight(this) {

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would need a test for this change as well

@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context);
if (subQueryScorerSupplier == null) {
return null;
}
Weight weight = this;
return new ScorerSupplier() {
private Scorer scorer;
private BulkScorer bulkScorer;

@Override
public BulkScorer bulkScorer() throws IOException {
if (minScore == null) {
final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context);
if (subQueryBulkScorer == null) {
bulkScorer = null;
} else {
bulkScorer = new ScriptScoreBulkScorer(
subQueryBulkScorer,
subQueryScoreMode,
makeScoreScript(context),
boost
);
}
final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer();
return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost);
} else {
final Scorer scorer = get(Long.MAX_VALUE);
if (scorer != null) {
bulkScorer = new Weight.DefaultBulkScorer(scorer);
}
return super.bulkScorer();
}

return bulkScorer;
}

@Override
public Scorer get(long leadCost) throws IOException {
final Scorer subQueryScorer = subQueryWeight.scorer(context);

if (subQueryScorer == null) {
scorer = null;
} else {
Scorer scriptScorer = new ScriptScorer(
weight,
makeScoreScript(context),
subQueryScorer,
subQueryScoreMode,
boost,
null
);
if (minScore != null) {
scriptScorer = new MinScoreScorer(weight, scriptScorer, minScore);
}
scorer = scriptScorer;
Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost);
Scorer scriptScorer = new ScriptScorer(
weight,
makeScoreScript(context),
subQueryScorer,
subQueryScoreMode,
boost,
null
);
if (minScore != null) {
scriptScorer = new MinScoreScorer(weight, scriptScorer, minScore);
}

return scorer;
return scriptScorer;
}

@Override
public long cost() {
if (scorer != null) {
return scorer.iterator().cost();
} else if (bulkScorer != null) {
return bulkScorer.cost();
} else {
// We have no prior knowledge of how many docs might match for any given query term,
// so we assume that all docs could be a match.
return Integer.MAX_VALUE;
}
return subQueryScorerSupplier.cost();
}
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,10 @@
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
Expand Down Expand Up @@ -216,6 +218,64 @@ public void testTwoPhaseIteratorDelegation() throws IOException {
assertTrue("Expected to find at least one matching document", foundMatchingDoc);
}

// When the wrapped query's Weight yields a null ScorerSupplier, ScriptScoreQuery's
// own Weight must propagate null rather than dereference it.
public void testNullScorerSupplier() throws IOException {
    final Script script = new Script("script using explain");
    final ScoreScript.LeafFactory leafFactory = newFactory(script, true, explanation -> {
        assertNotNull(explanation);
        explanation.set("this explains the score");
        return 1.0;
    });

    final ScriptScoreQuery scriptScoreQuery = new ScriptScoreQuery(
        new NullScorerSupplierQuery(),
        script,
        leafFactory,
        null,
        "index",
        0,
        Version.CURRENT
    );
    final Weight weight = scriptScoreQuery.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
    assertNull(weight.scorerSupplier(null));
}

// Minimal stub query whose Weight always returns a null ScorerSupplier,
// simulating a sub-query that cannot match any document in a segment.
private static class NullScorerSupplierQuery extends Query {

@Override
public String toString(String field) {
return getClass().getSimpleName();
}

@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
}

// Identity-based equality: each instance is only equal to itself.
@Override
public boolean equals(Object obj) {
return this == obj;
}

// Constant hash is consistent with identity equals (equal objects share a hash);
// hash quality is irrelevant for this test-only stub.
@Override
public int hashCode() {
return 0;
}

@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
return new Weight(this) {

// Never invoked by the test; fail loudly if it ever is.
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
throw new UnsupportedOperationException();
}

// The case under test: a null supplier, which wrapping weights must tolerate.
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
return null;
}

@Override
public boolean isCacheable(LeafReaderContext ctx) {
return true;
}
};
}
}

private ScoreScript.LeafFactory newFactory(
Script script,
boolean needsScore,
Expand Down
Loading