Merged
36 commits
5d90b35
Add multi-project support for more stats APIs
PeteGillinElastic May 2, 2025
2ceb353
Update test/external-modules/multi-project/build.gradle
PeteGillinElastic May 6, 2025
7cc0618
Respond to review comments
PeteGillinElastic May 6, 2025
e7ba94e
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 6, 2025
793c415
fix merge weirdness
PeteGillinElastic May 6, 2025
46d4aa4
[CI] Auto commit changes from spotless
May 6, 2025
73143f4
Fix test compilation following upstream change to base class
PeteGillinElastic May 6, 2025
39eced1
Update x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/…
PeteGillinElastic May 7, 2025
635fd65
Make projects-by-index map nullable and omit in single-project; alway…
PeteGillinElastic May 7, 2025
600bb6c
Add a TODO
PeteGillinElastic May 7, 2025
1df27b1
update IT to reflect changed behaviour
PeteGillinElastic May 7, 2025
084d605
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 7, 2025
6e1f8a7
Switch to using XContent.Params to indicate whether it is multi-proje…
PeteGillinElastic May 12, 2025
ba44688
Refactor NodesStatsMultiProjectIT to common up repeated assertions
PeteGillinElastic May 12, 2025
23aa344
Defer use of ProjectIdResolver in REST handlers to keep tests happy
PeteGillinElastic May 12, 2025
8ef9be4
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 12, 2025
d9a9a7f
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 15, 2025
7f60ef3
Include index UUID in "unknown project" case
PeteGillinElastic May 15, 2025
3bfafbf
Make the index-to-project map empty rather than null in the BWC deser…
PeteGillinElastic May 15, 2025
1810bc4
remove a TODO that is done, and add a comment
PeteGillinElastic May 15, 2025
c29f27d
fix typo
PeteGillinElastic May 15, 2025
1fcad79
Get REST YAML tests working with project ID prefix TODO finish this
PeteGillinElastic May 19, 2025
466b2a0
As a drive-by, fix and un-suppress one of the health REST tests
PeteGillinElastic May 19, 2025
ad08d50
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 19, 2025
e0fab44
[CI] Auto commit changes from spotless
May 19, 2025
43cde05
TODO ugh
PeteGillinElastic May 19, 2025
7ad66ab
Experiment with different stashing behaviour
PeteGillinElastic May 19, 2025
7d16998
[CI] Auto commit changes from spotless
May 19, 2025
9d09783
Try a more sensible stash behaviour for assertions
PeteGillinElastic May 19, 2025
8770871
clarify comment
PeteGillinElastic May 19, 2025
3b3b02c
Make checkstyle happy
PeteGillinElastic May 19, 2025
dab57e6
Make the way `Assertion` works more consistent, and simplify implemen…
PeteGillinElastic May 20, 2025
918c1f3
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 20, 2025
a95a3d2
[CI] Auto commit changes from spotless
May 20, 2025
d353627
In RestNodesStatsAction, make the XContent params to channel.request(…
PeteGillinElastic May 21, 2025
5c50366
Merge remote-tracking branch 'upstream/main' into ES-10063-mp-stats
PeteGillinElastic May 21, 2025
@@ -20,6 +20,7 @@
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.internal.Requests;
import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.metadata.ProjectId;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
@@ -112,7 +113,12 @@ public void testFailureInConditionalProcessor() {
NodesStatsResponse r = clusterAdmin().prepareNodesStats(internalCluster().getNodeNames()).setIngest(true).get();
int nodeCount = r.getNodes().size();
for (int k = 0; k < nodeCount; k++) {
-List<IngestStats.ProcessorStat> stats = r.getNodes().get(k).getIngestStats().processorStats().get(pipelineId);
+List<IngestStats.ProcessorStat> stats = r.getNodes()
+    .get(k)
+    .getIngestStats()
+    .processorStats()
+    .get(ProjectId.DEFAULT)
+    .get(pipelineId);
for (IngestStats.ProcessorStat st : stats) {
assertThat(st.stats().ingestCurrent(), greaterThanOrEqualTo(0L));
}
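The change above captures the core API shift in this PR: `IngestStats#processorStats()` is now keyed first by `ProjectId` and only then by pipeline ID. A minimal sketch of the resulting two-level lookup, assuming the nested-map shape shown in this diff (the helper class and method names are hypothetical, added here for illustration):

```java
import java.util.List;
import java.util.Map;

import org.elasticsearch.cluster.metadata.ProjectId;
import org.elasticsearch.ingest.IngestStats;

class ProcessorStatsLookup {
    // Hypothetical helper: resolve the processor stats for one pipeline in one
    // project, returning an empty list when either map level has no entry.
    static List<IngestStats.ProcessorStat> processorStatsFor(IngestStats stats, ProjectId projectId, String pipelineId) {
        Map<String, List<IngestStats.ProcessorStat>> byPipeline = stats.processorStats().getOrDefault(projectId, Map.of());
        return byPipeline.getOrDefault(pipelineId, List.of());
    }
}
```

On a single-project cluster the outer map appears to hold only `ProjectId.DEFAULT`, which is why these tests can hard-code that key.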
@@ -85,22 +85,22 @@ teardown:
- gte: { ingest.total.failed: 0 }

# Pipelines section
-- is_true: ingest.pipelines.ingest_info_pipeline
-- gte: { ingest.pipelines.ingest_info_pipeline.count: 2 }
-- gte: { ingest.pipelines.ingest_info_pipeline.time_in_millis: 0 }
-- match: { ingest.pipelines.ingest_info_pipeline.current: 0 }
-- match: { ingest.pipelines.ingest_info_pipeline.failed: 0 }
-- gt: { ingest.pipelines.ingest_info_pipeline.ingested_as_first_pipeline_in_bytes: 0 }
-- gt: { ingest.pipelines.ingest_info_pipeline.produced_as_first_pipeline_in_bytes: 0 }
+- is_true: "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline"
+- gte: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.count": 2 }
+- gte: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.time_in_millis": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.current": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.failed": 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.ingested_as_first_pipeline_in_bytes": 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.produced_as_first_pipeline_in_bytes": 0 }

# Processors section
-- is_true: ingest.pipelines.ingest_info_pipeline.processors.0.set
-- match: { ingest.pipelines.ingest_info_pipeline.processors.0.set.type: "set" }
-- is_true: ingest.pipelines.ingest_info_pipeline.processors.0.set.stats
-- gte: { ingest.pipelines.ingest_info_pipeline.processors.0.set.stats.count: 2 }
-- gte: { ingest.pipelines.ingest_info_pipeline.processors.0.set.stats.time_in_millis: 0 }
-- match: { ingest.pipelines.ingest_info_pipeline.processors.0.set.stats.current: 0 }
-- match: { ingest.pipelines.ingest_info_pipeline.processors.0.set.stats.failed: 0 }
+- is_true: "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set"
+- match: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.type": "set" }
+- is_true: "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.stats"
+- gte: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.stats.count": 2 }
+- gte: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.stats.time_in_millis": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.stats.current": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}ingest_info_pipeline.processors.0.set.stats.failed": 0 }

---
"Test bytes_produced not increased when pipeline fails":
@@ -128,9 +128,9 @@ teardown:
- do:
cluster.info:
target: [ ingest ]
-- match: { ingest.pipelines.pipeline-1.failed: 1 }
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- match: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.failed": 1 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": 0 }

---
"Test drop processor":
@@ -156,8 +156,8 @@ teardown:
- do:
cluster.info:
target: [ ingest ]
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- match: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": 0 }

---
"Test that pipeline processor has byte stats recorded in first pipeline":
@@ -210,11 +210,11 @@ teardown:
- do:
cluster.info:
target: [ ingest ]
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- set: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: ingest_bytes }
-- gt: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: $ingest_bytes }
-- match: { ingest.pipelines.pipeline-2.ingested_as_first_pipeline_in_bytes: 0 }
-- match: { ingest.pipelines.pipeline-2.produced_as_first_pipeline_in_bytes: 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- set: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": ingest_bytes }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": $ingest_bytes }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.ingested_as_first_pipeline_in_bytes": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.produced_as_first_pipeline_in_bytes": 0 }

---
"Test that final pipeline has byte stats recorded in first pipeline":
@@ -262,11 +262,11 @@ teardown:
- do:
cluster.info:
target: [ ingest ]
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- set: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: ingest_bytes }
-- gt: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: $ingest_bytes }
-- match: { ingest.pipelines.pipeline-2.ingested_as_first_pipeline_in_bytes: 0 }
-- match: { ingest.pipelines.pipeline-2.produced_as_first_pipeline_in_bytes: 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- set: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": ingest_bytes }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": $ingest_bytes }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.ingested_as_first_pipeline_in_bytes": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.produced_as_first_pipeline_in_bytes": 0 }

---
"Test that reroute processor has byte stats recorded in first pipeline":
@@ -327,11 +327,11 @@ teardown:
- do:
cluster.info:
target: [ ingest ]
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- set: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: ingest_bytes }
-- gt: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: $ingest_bytes }
-- match: { ingest.pipelines.pipeline-2.ingested_as_first_pipeline_in_bytes: 0 }
-- match: { ingest.pipelines.pipeline-2.produced_as_first_pipeline_in_bytes: 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- set: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": ingest_bytes }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": $ingest_bytes }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.ingested_as_first_pipeline_in_bytes": 0 }
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-2.produced_as_first_pipeline_in_bytes": 0 }

---
"Test human readable byte stat fields":
@@ -360,8 +360,8 @@ teardown:
target: [ ingest ]
human: true

-- match: { ingest.pipelines.pipeline-1.count: 1 }
-- gt: { ingest.pipelines.pipeline-1.ingested_as_first_pipeline_in_bytes: 0 }
-- gt: { ingest.pipelines.pipeline-1.produced_as_first_pipeline_in_bytes: 0 }
-- is_true: ingest.pipelines.pipeline-1.ingested_as_first_pipeline
-- is_true: ingest.pipelines.pipeline-1.produced_as_first_pipeline
+- match: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.count": 1 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline_in_bytes": 0 }
+- gt: { "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline_in_bytes": 0 }
+- is_true: "ingest.pipelines.${_project_id_prefix_}pipeline-1.ingested_as_first_pipeline"
+- is_true: "ingest.pipelines.${_project_id_prefix_}pipeline-1.produced_as_first_pipeline"
@@ -88,10 +88,10 @@ teardown:
- gte: {nodes.$master.ingest.total.failed: 0}
- gte: {nodes.$master.ingest.total.time_in_millis: 0}
- match: {nodes.$master.ingest.total.current: 0}
-- gte: {nodes.$master.ingest.pipelines.pipeline1.count: 0}
-- match: {nodes.$master.ingest.pipelines.pipeline1.failed: 0}
-- gte: {nodes.$master.ingest.pipelines.pipeline1.time_in_millis: 0}
-- match: {nodes.$master.ingest.pipelines.pipeline1.current: 0}
+- gte: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline1.count": 0}
+- match: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline1.failed": 0}
+- gte: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline1.time_in_millis": 0}
+- match: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline1.current": 0}

---
"Test bulk request with default pipeline":
@@ -124,10 +124,10 @@ teardown:
- gte: {nodes.$master.ingest.total.failed: 0}
- gte: {nodes.$master.ingest.total.time_in_millis: 0}
- match: {nodes.$master.ingest.total.current: 0}
-- gte: {nodes.$master.ingest.pipelines.pipeline2.count: 0}
-- match: {nodes.$master.ingest.pipelines.pipeline2.failed: 0}
-- gte: {nodes.$master.ingest.pipelines.pipeline2.time_in_millis: 0}
-- match: {nodes.$master.ingest.pipelines.pipeline2.current: 0}
+- gte: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline2.count": 0}
+- match: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline2.failed": 0}
+- gte: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline2.time_in_millis": 0}
+- match: { "nodes.$master.ingest.pipelines.${_project_id_prefix_}pipeline2.current": 0}

- do:
get:
@@ -26,4 +26,4 @@
- length: { indicators.shards_availability.diagnosis: 1 }
- is_true: indicators.shards_availability.diagnosis.0.affected_resources
- length: { indicators.shards_availability.diagnosis.0.affected_resources: 1 }
- match: { indicators.shards_availability.diagnosis.0.affected_resources.indices.0: "red_index" }
- match: { indicators.shards_availability.diagnosis.0.affected_resources.indices.0: "${_project_id_prefix_}red_index" }
@@ -503,9 +503,9 @@
- gte: { nodes.$node_id.indices.mappings.total_count: 28 }
- is_true: nodes.$node_id.indices.mappings.total_estimated_overhead
- gte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 26624 }
-- match: { nodes.$node_id.indices.indices.index1.mappings.total_count: 28 }
-- is_true: nodes.$node_id.indices.indices.index1.mappings.total_estimated_overhead
-- match: { nodes.$node_id.indices.indices.index1.mappings.total_estimated_overhead_in_bytes: 28672 }
+- match: { "nodes.$node_id.indices.indices.${_project_id_prefix_}index1.mappings.total_count": 28 }
Member commented:

Hey @PeteGillinElastic,
I have a quick question: why do we expect `mappings.total_count` to equal exactly 28 here, when above, on line 503, we only require it to be greater than or equal (gte) to 28?
Asking because we have a test failure (#120950) where we got 29 instead of 28. Am I missing something, or can I create a fix using gte instead of match?
Thanks!

Member (author) replied:

I don't think I'm the right person to help here. I didn't write these assertions; I just tweaked the JSON paths to reflect the different structure of the response in multi-project mode. It looks like they were introduced in #110676, so maybe @dnhatn is better placed to answer this?

- is_true: "nodes.$node_id.indices.indices.${_project_id_prefix_}index1.mappings.total_estimated_overhead"
- match: { "nodes.$node_id.indices.indices.${_project_id_prefix_}index1.mappings.total_estimated_overhead_in_bytes": 28672 }

---
"Lucene segment level fields stats":
@@ -15,6 +15,7 @@
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.cluster.metadata.ProjectId;
import org.elasticsearch.common.Strings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptEngine;
@@ -109,7 +110,10 @@ public void testIngestStatsNamesAndTypes() throws IOException {
assertThat(pipelineStat.pipelineId(), equalTo("pipeline1"));
assertThat(pipelineStat.stats().ingestCount(), equalTo(1L));

-List<IngestStats.ProcessorStat> processorStats = stats.getIngestStats().processorStats().get("pipeline1");
+List<IngestStats.ProcessorStat> processorStats = stats.getIngestStats()
+    .processorStats()
+    .get(ProjectId.DEFAULT)
+    .get("pipeline1");
assertThat(processorStats.size(), equalTo(4));

IngestStats.ProcessorStat setA = processorStats.get(0);
@@ -259,6 +259,7 @@ static TransportVersion def(int id) {
public static final TransportVersion ESQL_TIME_SERIES_SOURCE_STATUS = def(9_076_0_00);
public static final TransportVersion ESQL_HASH_OPERATOR_STATUS_OUTPUT_TIME = def(9_077_0_00);
public static final TransportVersion ML_INFERENCE_HUGGING_FACE_CHAT_COMPLETION_ADDED = def(9_078_0_00);
+public static final TransportVersion NODES_STATS_SUPPORTS_MULTI_PROJECT = def(9_079_0_00);

/*
* STOP! READ THIS FIRST! No, really,
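A new transport version such as `NODES_STATS_SUPPORTS_MULTI_PROJECT` conventionally gates the wire format so that mixed-version clusters keep working. A minimal sketch of that pattern, assuming a projects-by-index map serialized as plain strings (the class and method names are illustrative, not the PR's actual code; the empty-map read fallback mirrors the "empty rather than null" BWC commit above):

```java
import java.io.IOException;
import java.util.Map;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

class ProjectsByIndexWire {
    // Write the map only when the receiving node is new enough to read it.
    static void write(StreamOutput out, Map<String, String> projectsByIndex) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.NODES_STATS_SUPPORTS_MULTI_PROJECT)) {
            out.writeMap(projectsByIndex, StreamOutput::writeString, StreamOutput::writeString);
        }
    }

    // On the read side, fall back to an empty map for older senders.
    static Map<String, String> read(StreamInput in) throws IOException {
        if (in.getTransportVersion().onOrAfter(TransportVersions.NODES_STATS_SUPPORTS_MULTI_PROJECT)) {
            return in.readMap(StreamInput::readString, StreamInput::readString);
        }
        return Map.of();
    }
}
```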
@@ -846,7 +846,7 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster, Predicate<
registerHandler.accept(new RestNodesInfoAction(settingsFilter));
registerHandler.accept(new RestRemoteClusterInfoAction());
registerHandler.accept(new RestNodesCapabilitiesAction());
-registerHandler.accept(new RestNodesStatsAction());
+registerHandler.accept(new RestNodesStatsAction(projectIdResolver));
registerHandler.accept(new RestNodesUsageAction());
registerHandler.accept(new RestNodesHotThreadsAction());
registerHandler.accept(new RestClusterAllocationExplainAction());
@@ -981,7 +981,7 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster, Predicate<
registerHandler.accept(new RestShardsAction());
registerHandler.accept(new RestMasterAction());
registerHandler.accept(new RestNodesAction());
-registerHandler.accept(new RestClusterInfoAction());
+registerHandler.accept(new RestClusterInfoAction(projectIdResolver));
registerHandler.accept(new RestTasksAction(nodesInCluster));
registerHandler.accept(new RestIndicesAction(projectIdResolver));
registerHandler.accept(new RestSegmentsAction());
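Per the commit list, these handlers now receive the `ProjectIdResolver` so they can signal through `ToXContent.Params` whether a response should be rendered in multi-project form. A hedged sketch of that wiring, assuming the flag travels as a boolean-valued param keyed by the constant added to `NodeStats` below (the wrapper class is hypothetical):

```java
import java.util.Map;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xcontent.ToXContent;

class MultiProjectParams {
    // Wrap the incoming request's params with the multi-project flag so that
    // NodeStats#toXContent can choose the right key layout.
    static ToXContent.Params withMultiProjectFlag(RestRequest request, boolean multiProjectEnabled) {
        return new ToXContent.DelegatingMapParams(
            Map.of(NodeStats.MULTI_PROJECT_ENABLED_XCONTENT_PARAM_KEY, Boolean.toString(multiProjectEnabled)),
            request
        );
    }
}
```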
@@ -52,6 +52,8 @@
*/
public class NodeStats extends BaseNodeResponse implements ChunkedToXContent {

+public static final String MULTI_PROJECT_ENABLED_XCONTENT_PARAM_KEY = "multi_project_enabled_node_stats";

private final long timestamp;

@Nullable
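On the rendering side, the response code can consult that param with a single-project default; a one-method sketch, assuming the flag is carried as a boolean-valued string:

```java
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.xcontent.ToXContent;

class MultiProjectRendering {
    // Default to single-project rendering when the param is absent.
    static boolean isMultiProject(ToXContent.Params params) {
        return params.paramAsBoolean(NodeStats.MULTI_PROJECT_ENABLED_XCONTENT_PARAM_KEY, false);
    }
}
```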