Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -439,6 +439,43 @@ public void testDeleteRuleForNonexistentId() throws Exception {
assertTrue("Expected error message for nonexistent rule ID", exception.getMessage().contains("no such index"));
}

/**
 * Verifies end-to-end that both the initial search (with scroll) and the follow-up
 * scroll request are auto-tagged with the workload group, i.e. the group's completion
 * counter increases after each of the two requests.
 */
public void testScrollRequestsAreAlsoTagged() throws Exception {
    String workloadGroupId = "wlm_auto_tag_scroll";
    String ruleId = "wlm_auto_tag_scroll_rule";
    String indexName = "scroll_tagged_index";

    setWlmMode("enabled");

    // Create a workload group and a rule mapping the test index to it.
    WorkloadGroup workloadGroup = createWorkloadGroup("scroll_tagging_group", workloadGroupId);
    updateWorkloadGroupInClusterState(PUT, workloadGroup);

    FeatureType featureType = AutoTaggingRegistry.getFeatureType(WorkloadGroupFeatureType.NAME);
    createRule(ruleId, "scroll tagging rule", indexName, featureType, workloadGroupId);

    indexDocument(indexName);

    assertBusy(() -> {
        int completionsBefore = getCompletions(workloadGroupId);

        SearchResponse initial = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.matchAllQuery())
            .setScroll(TimeValue.timeValueMinutes(1))
            .setSize(1)
            .get();

        String scrollId = initial.getScrollId();
        assertNotNull("scrollId must not be null", scrollId);

        try {
            int afterInitialSearch = getCompletions(workloadGroupId);
            assertTrue("Expected completions to increase after initial search with scroll", afterInitialSearch > completionsBefore);

            client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(1)).get();

            int afterScroll = getCompletions(workloadGroupId);
            assertTrue("Expected completions to increase after scroll request as well", afterScroll > afterInitialSearch);
        } finally {
            // Release the server-side scroll context so retries of this assertBusy
            // block (and subsequent tests) do not leak open scroll contexts.
            client().prepareClearScroll().addScrollId(scrollId).get();
        }
    });
}

// Helper functions
private void createRule(String ruleId, String ruleName, String indexPattern, FeatureType featureType, String workloadGroupId)
throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,18 @@
import org.opensearch.action.ActionRequest;
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchScrollRequest;
import org.opensearch.action.support.ActionFilter;
import org.opensearch.action.support.ActionFilterChain;
import org.opensearch.action.support.ActionRequestMetadata;
import org.opensearch.cluster.metadata.OptionallyResolvedIndices;
import org.opensearch.cluster.metadata.ResolvedIndices;
import org.opensearch.core.action.ActionListener;
import org.opensearch.core.action.ActionResponse;
import org.opensearch.plugin.wlm.rule.attribute_extractor.IndicesExtractor;
import org.opensearch.plugin.wlm.spi.AttributeExtractorExtension;
import org.opensearch.rule.InMemoryRuleProcessingService;
import org.opensearch.rule.RuleAttribute;
import org.opensearch.rule.attribute_extractor.AttributeExtractor;
import org.opensearch.rule.autotagging.Attribute;
import org.opensearch.rule.autotagging.FeatureType;
Expand All @@ -31,6 +35,7 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static org.opensearch.plugin.wlm.WorkloadManagementPlugin.PRINCIPAL_ATTRIBUTE_NAME;

Expand Down Expand Up @@ -80,14 +85,44 @@ public <Request extends ActionRequest, Response extends ActionResponse> void app
ActionListener<Response> listener,
ActionFilterChain<Request, Response> chain
) {
final boolean isValidRequest = request instanceof SearchRequest;
final boolean isSearchRequest = request instanceof SearchRequest;
final boolean isSearchScrollRequest = request instanceof SearchScrollRequest;
final boolean isValidRequest = isSearchRequest || isSearchScrollRequest;

if (!isValidRequest || wlmClusterSettingValuesProvider.getWlmMode() == WlmMode.DISABLED) {
chain.proceed(task, action, request, listener);
return;
}
List<AttributeExtractor<String>> attributeExtractors = new ArrayList<>();
attributeExtractors.add(new IndicesExtractor((IndicesRequest) request));
final OptionallyResolvedIndices optionallyResolved = actionRequestMetadata.resolvedIndices();
final boolean hasResolvedIndices = optionallyResolved instanceof ResolvedIndices;

if (hasResolvedIndices) {
final ResolvedIndices resolved = (ResolvedIndices) optionallyResolved;
final Set<String> names = resolved.local().names();

attributeExtractors.add(new AttributeExtractor<>() {
@Override
public Attribute getAttribute() {
return RuleAttribute.INDEX_PATTERN;
}

@Override
public Iterable<String> extract() {
return names;
}

@Override
public LogicalOperator getLogicalOperator() {
return LogicalOperator.AND;
}
});
} else if (isSearchRequest) {
attributeExtractors.add(new IndicesExtractor((IndicesRequest) request));
} else {
chain.proceed(task, action, request, listener);
return;
}
Comment on lines +97 to +125
Copy link

@coderabbitai coderabbitai bot Dec 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Search for LogicalOperator usage in similar contexts
rg -n -C 3 "LogicalOperator\." --type=java -g '!**/test/**' plugins/workload-management/

Repository: opensearch-project/OpenSearch

Length of output: 202


🌐 Web query:

OpenSearch workload management rule evaluation LogicalOperator index patterns

💡 Result:

Summary of how OpenSearch evaluates workload-management rules with index patterns

  • index_pattern supports exact names or wildcard/prefix patterns (e.g., "logs-2025-04" or "logs-*"). [1][2]
  • Within a single attribute value list (e.g. "index_pattern": ["logs-*", "events-*"]), items are matched as OR — a request targeting any listed pattern satisfies that attribute. [1][2]
  • A rule with multiple different attributes (for example index_pattern plus principal.username) requires all those attribute conditions to be met to match (the rule’s attributes are conjunctive). In other words, attribute clauses within a rule are combined so the rule matches only when every specified attribute matches the request. [2]
  • When multiple rules match a request, OpenSearch resolves which rule to apply using precedence: attributes have a fixed priority order (e.g., principal.username/principal.role higher than index_pattern for workload_group rules), and more specific patterns win over more general ones (e.g., logs-prod-2025-* beats logs-prod-* beats logs-*). The rule with the highest-priority attribute / most specific match is applied. [1][2]
  • Practical implications:
    • Use specific index patterns to ensure the intended rule wins.
    • Use lists in index_pattern to cover multiple indices (they behave as OR).
    • Combine attributes in a rule to target narrower request sets (attributes are ANDed). [1][2]

Sources:
[1] OpenSearch — Workload group rules (Rules API / workload_group).
[2] OpenSearch — Rule-based auto-tagging (attribute matching, precedence, examples).


Fix LogicalOperator.AND to LogicalOperator.OR for index pattern matching.

Within a single attribute's value list, index patterns are evaluated as OR (a request matching any pattern satisfies the attribute). Line 117 should use LogicalOperator.OR instead of LogicalOperator.AND to align with OpenSearch workload management rule semantics. Using AND would incorrectly require all index patterns to match simultaneously, breaking the intended behavior where multiple patterns in a list provide alternative matching criteria.

The fallback behavior at lines 122-125 (proceeding silently when SearchScrollRequest lacks resolved indices) appears intentional as a defensive measure, though logging this scenario could improve observability for debugging unexpected states where the transport layer fails to provide resolved indices.

🤖 Prompt for AI Agents
In
plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/AutoTaggingActionFilter.java
around lines 97 to 125, the LogicalOperator used for the INDEX_PATTERN attribute
is incorrect: change LogicalOperator.AND to LogicalOperator.OR on the
AttributeExtractor at line ~117 so that multiple index patterns are treated as
alternatives (match any) rather than requiring all to match; leave the rest of
the control flow intact (keep the IndicesExtractor fallback and the early
proceed/return behavior), and optionally add a debug log where the code proceeds
silently for clarity if desired.

Copy link
Contributor Author

@laminelam laminelam Dec 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I followed the same logic as IndicesExtractor, which uses AND.

public class IndicesExtractor implements AttributeExtractor<String> {
    private final IndicesRequest indicesRequest;

    /**
     * Default constructor
     * @param indicesRequest
     */
    public IndicesExtractor(IndicesRequest indicesRequest) {
        this.indicesRequest = indicesRequest;
    }

    @Override
    public Attribute getAttribute() {
        return RuleAttribute.INDEX_PATTERN;
    }

    @Override
    public Iterable<String> extract() {
        return List.of(indicesRequest.indices());
    }

    @Override
    public LogicalOperator getLogicalOperator() {
        return AND;
    }
}

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Search for LogicalOperator enum definition and usage in rule evaluation
rg -n -A 10 "enum LogicalOperator" --type=java

# Search for how evaluateLabel uses LogicalOperator
rg -n -C 5 "getLogicalOperator\(\)" --type=java -g '!**/test/**'

# Search for rule evaluation logic
rg -n -C 5 "evaluateLabel" --type=java -g '!**/test/**' | head -100

Length of output: 8550


🏁 Script executed:

#!/bin/bash
# Find tests for IndicesExtractor and rule evaluation with multiple indices
rg -n -B 5 -A 10 "IndicesExtractor" --type=java -g '**/test/**'

# Search for AttributeExtractor interface definition
ast-grep --pattern 'interface AttributeExtractor$_$ {
  $$$
}'

Length of output: 0


🏁 Script executed:

#!/bin/bash
# Look for the rule processing service implementation
fd -e java RuleProcessingService | head -5 | xargs cat

Length of output: 10746


@laminelam You're absolutely right, and I apologize for the confusion in my previous comment.

After reviewing the code more carefully, I found the documentation in AttributeExtractor.java that clarifies the semantics:

"if the request targets both index A and B, then a rule must have both index A and B as attributes, requiring an AND operator."

The LogicalOperator controls how multiple indices extracted from the request should be matched against rules, not how multiple patterns within a rule are evaluated:

  • AND: If a request targets indices ["logs-2024", "metrics-2024"], a matching rule must contain BOTH indices in its index_pattern attribute
  • OR: If a request targets indices ["logs-2024", "metrics-2024"], a matching rule can contain EITHER index (or both)

Using AND is correct because when a request operates on multiple indices simultaneously, you want to ensure the rule covers all of them. This prevents partial matches that could lead to incorrect tagging when sensitive and non-sensitive indices are accessed together.

Your implementation correctly follows the same logic as IndicesExtractor. My earlier comment about OR was based on a misunderstanding—I was conflating how patterns within a rule definition are evaluated (which is OR) with how the extractor's logical operator works (which should be AND for this use case).

Thank you for the clarification!

🐰
/___/
( o o )
( =^= ) learned something new!
/| |
(| |)


🧬 Code Graph Analysis Results

[plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rule/attribute_extractor/IndicesExtractor.java] (lines 23-48)

public class IndicesExtractor implements AttributeExtractor<String> {
    private final IndicesRequest indicesRequest;

    /**
     * Default constructor
     * @param indicesRequest
     */
    public IndicesExtractor(IndicesRequest indicesRequest) {
        this.indicesRequest = indicesRequest;
    }

    @Override
    public Attribute getAttribute() {
        return RuleAttribute.INDEX_PATTERN;
    }

    @Override
    public Iterable<String> extract() {
        return List.of(indicesRequest.indices());
    }

    @Override
    public LogicalOperator getLogicalOperator() {
        return AND;
    }
}

[server/src/main/java/org/opensearch/action/support/ActionRequestMetadata.java] (lines 21-61)

Docstring-style summary:

  • Purpose: ActionRequestMetadata tracks metadata about an ActionRequest, including the ability to resolve indices referenced by the request when supported by the transport action.
  • Method described: resolvedIndices()
    • Returns: OptionallyResolvedIndices
    • Behavior:
      • If the underlying transportAction is not an instance of TransportIndicesResolvingAction, returns an unknown/unknown-indices placeholder.
      • If it can resolve indices, calls resolveIndices(request) to obtain the actual indices information.
    • Important details:
      • Uses a generic type <Request extends ActionRequest, Response extends ActionResponse>.
      • The method relies on the transport-action's ability to resolve indices and may not cache results, as per the comment in the code.
  • This mechanism is used by callers to determine if indices can be resolved for a given request, enabling downstream logic to predicate on resolved local indices.


if (featureType.getAllowedAttributesRegistry().containsKey(PRINCIPAL_ATTRIBUTE_NAME)) {
Attribute attribute = featureType.getAllowedAttributesRegistry().get(PRINCIPAL_ATTRIBUTE_NAME);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@
import org.opensearch.action.ActionRequest;
import org.opensearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchScrollRequest;
import org.opensearch.action.support.ActionFilterChain;
import org.opensearch.action.support.ActionRequestMetadata;
import org.opensearch.cluster.metadata.ResolvedIndices;
import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.core.action.ActionListener;
import org.opensearch.core.action.ActionResponse;
Expand Down Expand Up @@ -93,6 +95,25 @@ public void testApplyForInValidRequest() {
verify(ruleProcessingService, times(0)).evaluateLabel(anyList());
}

// Verifies that a SearchScrollRequest whose indices were resolved from the scroll id
// is routed through rule evaluation and tagged with the matching workload group id.
public void testApplyForScrollRequestWithResolvedIndices() {
    SearchScrollRequest request = mock(SearchScrollRequest.class);
    ActionFilterChain<ActionRequest, ActionResponse> mockFilterChain = mock(TestActionFilterChain.class);

    @SuppressWarnings("unchecked")
    ActionRequestMetadata<ActionRequest, ActionResponse> metadata = mock(ActionRequestMetadata.class);
    // Simulate the transport layer having recovered the original index from the scroll id.
    ResolvedIndices resolved = ResolvedIndices.of("logs-scroll-index");
    when(metadata.resolvedIndices()).thenReturn(resolved);

    try (ThreadContext.StoredContext context = threadPool.getThreadContext().stashContext()) {
        when(ruleProcessingService.evaluateLabel(anyList())).thenReturn(Optional.of("ScrollQG_ID"));

        autoTaggingActionFilter.apply(mock(Task.class), "Test", request, metadata, null, mockFilterChain);

        // The filter must stamp the evaluated workload group id on the thread context,
        // and rule evaluation must have run exactly once for the scroll request.
        assertEquals("ScrollQG_ID", threadPool.getThreadContext().getHeader(WorkloadGroupTask.WORKLOAD_GROUP_ID_HEADER));
        verify(ruleProcessingService, times(1)).evaluateLabel(anyList());
    }
}

public enum WLMFeatureType implements FeatureType {
WLM;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,9 @@ public void sendSearchResponse(InternalSearchResponse internalSearchResponse, At
raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures));
} else {
final Version minNodeVersion = clusterState.nodes().getMinNodeVersion();
final String scrollId = request.scroll() != null ? TransportSearchHelper.buildScrollId(queryResults, minNodeVersion) : null;
final String scrollId = request.scroll() != null
? TransportSearchHelper.buildScrollId(queryResults, request.indices(), minNodeVersion)
: null;
final String searchContextId;
if (buildPointInTimeFromSearchResults()) {
searchContextId = SearchContextId.encode(queryResults.asList(), aliasFilter, minNodeVersion);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,11 +53,13 @@ public class ParsedScrollId {
private final String type;

private final SearchContextIdForNode[] context;
private final String[] originalIndices;

ParsedScrollId(String source, String type, SearchContextIdForNode[] context) {
/**
 * @param source the raw scroll id string this instance was parsed from
 * @param type the scroll type (e.g. query-then-fetch)
 * @param context per-shard search context ids with their target nodes
 * @param originalIndices index names recorded in the scroll id when the original
 *        search ran; empty for scroll ids created before indices were embedded
 */
ParsedScrollId(String source, String type, SearchContextIdForNode[] context, String[] originalIndices) {
    this.source = source;
    this.type = type;
    this.context = context;
    this.originalIndices = originalIndices;
}

public String getSource() {
Expand All @@ -72,6 +74,10 @@ public SearchContextIdForNode[] getContext() {
return context;
}

/**
 * Returns the index names embedded in the scroll id at original search time
 * (empty when the id predates index embedding). The internal array is returned
 * directly — callers must not mutate it.
 */
public String[] getOriginalIndices() {
    return originalIndices;
}

/**
 * Returns {@code true} if at least one shard context targets the local cluster.
 * A context entry with a {@code null} cluster alias refers to a local index.
 */
public boolean hasLocalIndices() {
    for (SearchContextIdForNode contextForNode : context) {
        if (contextForNode.getClusterAlias() == null) {
            return true;
        }
    }
    return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ public class SearchScrollRequest extends ActionRequest implements ToXContentObje

private String scrollId;
private Scroll scroll;
private transient ParsedScrollId parsedScrollId;

public SearchScrollRequest() {}

Expand Down Expand Up @@ -103,7 +104,10 @@ public SearchScrollRequest scrollId(String scrollId) {
}

/**
 * Parses {@link #scrollId} lazily and caches the result so repeated calls
 * (e.g. transport-level index resolution followed by action execution) decode
 * the id only once. Returns {@code null} when no scroll id has been set.
 * NOTE(review): the cached value is not invalidated here if {@code scrollId(String)}
 * is called after a successful parse — confirm the setter resets the cache if a
 * request instance can be reused with a different scroll id.
 */
public ParsedScrollId parseScrollId() {
    if (parsedScrollId == null && scrollId != null) {
        parsedScrollId = TransportSearchHelper.parseScrollId(scrollId);
    }
    return parsedScrollId;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,13 @@ static InternalScrollSearchRequest internalScrollSearchRequest(ShardSearchContex
return new InternalScrollSearchRequest(request, id);
}

/** First version whose scroll ids embed the original index names (used for WLM auto-tagging of scroll requests). */
public static final Version INDICES_IN_SCROLL_ID_VERSION = Version.V_3_3_2;

/**
 * Builds a scroll id without embedding index names; delegates to the three-argument
 * overload with {@code null} indices, for callers that do not need index-based
 * auto-tagging of subsequent scroll requests.
 */
static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhaseResults, Version version) {
    return buildScrollId(searchPhaseResults, null, version);
}

static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhaseResults, String[] originalIndices, Version version) {
try {
BytesStreamOutput out = new BytesStreamOutput();
out.writeString(INCLUDE_CONTEXT_UUID);
Expand All @@ -78,6 +84,19 @@ static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhase
out.writeString(searchShardTarget.getNodeId());
}
}

if (version.onOrAfter(INDICES_IN_SCROLL_ID_VERSION)) {
// To keep autotagging consistent between the initial SearchRequest
// and subsequent SearchScrollRequests, we store exactly the same
// index targets that were visible to the indices attribute during
// the "search" phase
if (originalIndices != null && originalIndices.length > 0) {
out.writeVInt(originalIndices.length);
for (String index : originalIndices) {
out.writeString(index);
}
}
}
byte[] bytes = BytesReference.toBytes(out.bytes());
return Base64.getUrlEncoder().encodeToString(bytes);
} catch (IOException e) {
Expand Down Expand Up @@ -114,10 +133,22 @@ static ParsedScrollId parseScrollId(String scrollId) {
}
context[i] = new SearchContextIdForNode(clusterAlias, target, new ShardSearchContextId(contextUUID, id));
}

String[] originalIndices;
if (in.getPosition() < bytes.length) {
final int numOriginalIndices = in.readVInt();
originalIndices = new String[numOriginalIndices];
for (int i = 0; i < numOriginalIndices; i++) {
originalIndices[i] = in.readString();
}
} else {
originalIndices = new String[0];
}

if (in.getPosition() != bytes.length) {
throw new IllegalArgumentException("Not all bytes were read");
}
return new ParsedScrollId(scrollId, type, context);
return new ParsedScrollId(scrollId, type, context, originalIndices);
} catch (Exception e) {
throw new IllegalArgumentException("Cannot parse scroll id", e);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@

import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.action.support.TransportIndicesResolvingAction;
import org.opensearch.cluster.metadata.OptionallyResolvedIndices;
import org.opensearch.cluster.metadata.ResolvedIndices;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.action.ActionListener;
Expand All @@ -48,7 +51,9 @@
*
* @opensearch.internal
*/
public class TransportSearchScrollAction extends HandledTransportAction<SearchScrollRequest, SearchResponse> {
public class TransportSearchScrollAction extends HandledTransportAction<SearchScrollRequest, SearchResponse>
implements
TransportIndicesResolvingAction<SearchScrollRequest> {

private final ClusterService clusterService;
private final SearchTransportService searchTransportService;
Expand Down Expand Up @@ -79,7 +84,7 @@ protected void doExecute(Task task, SearchScrollRequest request, ActionListener<
((WorkloadGroupTask) task).setWorkloadGroupId(threadPool.getThreadContext());
}

ParsedScrollId scrollId = TransportSearchHelper.parseScrollId(request.scrollId());
ParsedScrollId scrollId = request.parseScrollId();
Runnable action;
switch (scrollId.getType()) {
case ParsedScrollId.QUERY_THEN_FETCH_TYPE:
Expand Down Expand Up @@ -114,4 +119,26 @@ protected void doExecute(Task task, SearchScrollRequest request, ActionListener<
listener.onFailure(e);
}
}

@Override
public OptionallyResolvedIndices resolveIndices(SearchScrollRequest request) {
    // Best effort: any failure to decode the scroll id (malformed, pre-embedding
    // version, missing indices) simply means the indices are unknown.
    try {
        final String rawScrollId = request.scrollId();
        if (rawScrollId != null && rawScrollId.isEmpty() == false) {
            final ParsedScrollId parsedId = request.parseScrollId();
            if (parsedId != null) {
                final String[] indices = parsedId.getOriginalIndices();
                if (indices != null && indices.length > 0) {
                    return ResolvedIndices.of(indices);
                }
            }
        }
    } catch (Exception e) {
        // fall through to unknown — resolution is purely advisory
    }
    return OptionallyResolvedIndices.unknown();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,12 @@ public void testHasLocalIndices() {
new ShardSearchContextId(randomAlphaOfLength(8), randomLong())
);
}
final ParsedScrollId parsedScrollId = new ParsedScrollId(randomAlphaOfLength(8), randomAlphaOfLength(8), searchContextIdForNodes);
final ParsedScrollId parsedScrollId = new ParsedScrollId(
randomAlphaOfLength(8),
randomAlphaOfLength(8),
searchContextIdForNodes,
new String[0]
);

assertEquals(hasLocal, parsedScrollId.hasLocalIndices());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -481,7 +481,7 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
// Builds a ParsedScrollId from the given node contexts in a randomized order,
// so tests do not depend on the original ordering of the contexts.
private static ParsedScrollId getParsedScrollId(SearchContextIdForNode... idsForNodes) {
    // Arrays.asList returns a fixed-size view; shuffling it permutes the backing varargs array in place.
    final List<SearchContextIdForNode> contexts = Arrays.asList(idsForNodes);
    Collections.shuffle(contexts, random());
    final SearchContextIdForNode[] shuffled = contexts.toArray(new SearchContextIdForNode[0]);
    return new ParsedScrollId("", "test", shuffled, new String[0]);
}

private ActionListener<SearchResponse> dummyListener() {
Expand Down
Loading